6.style_GAN

PyTorch Introduction and Practice, Lesson 6

褚则伟 zeweichu@gmail.com

Contents

  • Image style transfer
  • Generating MNIST digits with a GAN
  • Generating more complex images with DCGAN

Image Style Transfer (Neural Style Transfer)

A Neural Algorithm of Artistic Style
This paper introduced the Neural Style Transfer model.

Demystifying Neural Style Transfer
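
Before the code, a quick note on the two losses involved: the method optimizes a target image so that its VGG features match the content image's features (content loss), while its Gram matrices, i.e. channel-to-channel feature correlations, match those of the style image (style loss). A minimal standalone sketch of the Gram matrix computation (the helper name gram_matrix is ours; the training loop below computes the same thing inline):

import torch

def gram_matrix(feat):
    # feat: conv feature map of shape (1, C, H, W)
    _, c, h, w = feat.size()
    f = feat.view(c, h * w)    # one row per channel
    return torch.mm(f, f.t())  # (C, C) matrix of channel correlations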

%matplotlib inline

from __future__ import division
from torchvision import models
from torchvision import transforms
from PIL import Image
import argparse
import torch
import torchvision
import torch.nn as nn
import numpy as np

import matplotlib.pyplot as plt

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
def load_image(image_path, transform=None, max_size=None, shape=None):
    image = Image.open(image_path)
    if max_size:
        scale = max_size / max(image.size)
        size = np.array(image.size) * scale
        # Image.ANTIALIAS was removed in newer Pillow; LANCZOS is the same filter
        image = image.resize(tuple(size.astype(int)), Image.LANCZOS)

    if shape:
        image = image.resize(shape, Image.LANCZOS)

    if transform:
        image = transform(image).unsqueeze(0)  # add a batch dimension

    return image.to(device)


transform = transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.485, 0.456, 0.406],
                         std=[0.229, 0.224, 0.225])
])  # mean and std from ImageNet

content = load_image("png/content.png", transform, max_size=400)
style = load_image("png/style.png", transform, shape=[content.size(2), content.size(3)])

# content = load_image("png/content.png", transforms.Compose([
#     transforms.ToTensor(),
# ]), max_size=400)
# style = load_image("png/style.png", transforms.Compose([
#     transforms.ToTensor(),
# ]), shape=[content.size(2), content.size(3)])
style.shape
torch.Size([1, 3, 400, 272])
unloader = transforms.ToPILImage()  # reconvert into PIL image

plt.ion()

def imshow(tensor, title=None):
    image = tensor.cpu().clone()  # we clone the tensor to not do changes on it
    image = image.squeeze(0)      # remove the fake batch dimension
    image = unloader(image)
    plt.imshow(image)
    if title is not None:
        plt.title(title)
    plt.pause(0.001)  # pause a bit so that plots are updated


plt.figure()
imshow(style[0], title='Image')
# content.shape
# content.shape

[output image: the style image]

class VGGNet(nn.Module):
    def __init__(self):
        super(VGGNet, self).__init__()
        self.select = ['0', '5', '10', '19', '28']  # indices of the VGG19 conv layers whose features we use
        self.vgg = models.vgg19(pretrained=True).features

    def forward(self, x):
        features = []
        for name, layer in self.vgg._modules.items():
            x = layer(x)
            if name in self.select:
                features.append(x)
        return features


target = content.clone().requires_grad_(True)
optimizer = torch.optim.Adam([target], lr=0.003, betas=[0.5, 0.999])
vgg = VGGNet().to(device).eval()
target_features = vgg(target)
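
One optional refinement not in the original code: the Adam optimizer above only updates target, so the VGG weights themselves never change, but autograd still computes and stores gradients for them on every backward pass. Freezing them saves memory and a little time; a small sketch:

for p in vgg.parameters():
    p.requires_grad_(False)  # we only optimize the target image, never VGG itself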
total_step = 2000
style_weight = 100.
for step in range(total_step):
    target_features = vgg(target)
    content_features = vgg(content)
    style_features = vgg(style)

    style_loss = 0
    content_loss = 0
    for f1, f2, f3 in zip(target_features, content_features, style_features):
        content_loss += torch.mean((f1-f2)**2)
        _, c, h, w = f1.size()
        f1 = f1.view(c, h*w)
        f3 = f3.view(c, h*w)

        # compute the Gram matrices
        f1 = torch.mm(f1, f1.t())
        f3 = torch.mm(f3, f3.t())
        style_loss += torch.mean((f1-f3)**2)/(c*h*w)

    loss = content_loss + style_weight * style_loss

    # update the target image
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()

    if step % 10 == 0:
        print("Step [{}/{}], Content Loss: {:.4f}, Style Loss: {:.4f}"
              .format(step, total_step, content_loss.item(), style_loss.item()))
Step [0/2000], Content Loss: 0.0000, Style Loss: 531.1730
Step [10/2000], Content Loss: 6.0654, Style Loss: 360.6187
Step [20/2000], Content Loss: 11.3430, Style Loss: 253.8006
Step [30/2000], Content Loss: 14.5195, Style Loss: 190.0798
Step [40/2000], Content Loss: 16.5578, Style Loss: 152.3939
Step [50/2000], Content Loss: 17.9683, Style Loss: 129.4922
Step [60/2000], Content Loss: 19.0225, Style Loss: 114.5218
Step [70/2000], Content Loss: 19.8584, Style Loss: 103.7824
Step [80/2000], Content Loss: 20.5509, Style Loss: 95.5047
Step [90/2000], Content Loss: 21.1601, Style Loss: 88.7919
Step [100/2000], Content Loss: 21.6844, Style Loss: 83.1393
Step [110/2000], Content Loss: 22.1447, Style Loss: 78.2809
Step [120/2000], Content Loss: 22.5605, Style Loss: 74.0401
Step [130/2000], Content Loss: 22.9415, Style Loss: 70.2842
Step [140/2000], Content Loss: 23.2941, Style Loss: 66.9353
Step [150/2000], Content Loss: 23.6130, Style Loss: 63.9158
Step [160/2000], Content Loss: 23.9114, Style Loss: 61.1637
Step [170/2000], Content Loss: 24.1892, Style Loss: 58.6509
Step [180/2000], Content Loss: 24.4448, Style Loss: 56.3407
Step [190/2000], Content Loss: 24.6883, Style Loss: 54.1998
Step [200/2000], Content Loss: 24.9212, Style Loss: 52.2185
Step [210/2000], Content Loss: 25.1355, Style Loss: 50.3827
Step [220/2000], Content Loss: 25.3350, Style Loss: 48.6758
Step [230/2000], Content Loss: 25.5269, Style Loss: 47.0833
Step [240/2000], Content Loss: 25.7123, Style Loss: 45.5909
Step [250/2000], Content Loss: 25.8884, Style Loss: 44.1901
Step [260/2000], Content Loss: 26.0555, Style Loss: 42.8741
Step [270/2000], Content Loss: 26.2152, Style Loss: 41.6320
Step [280/2000], Content Loss: 26.3691, Style Loss: 40.4600
Step [290/2000], Content Loss: 26.5208, Style Loss: 39.3519
Step [300/2000], Content Loss: 26.6641, Style Loss: 38.3040
Step [310/2000], Content Loss: 26.8034, Style Loss: 37.3103
Step [320/2000], Content Loss: 26.9339, Style Loss: 36.3693
Step [330/2000], Content Loss: 27.0649, Style Loss: 35.4760
Step [340/2000], Content Loss: 27.1923, Style Loss: 34.6284
Step [350/2000], Content Loss: 27.3130, Style Loss: 33.8245
Step [360/2000], Content Loss: 27.4284, Style Loss: 33.0575
Step [370/2000], Content Loss: 27.5356, Style Loss: 32.3269
Step [380/2000], Content Loss: 27.6426, Style Loss: 31.6281
Step [390/2000], Content Loss: 27.7454, Style Loss: 30.9596
Step [400/2000], Content Loss: 27.8430, Style Loss: 30.3200
Step [410/2000], Content Loss: 27.9398, Style Loss: 29.7072
Step [420/2000], Content Loss: 28.0368, Style Loss: 29.1180
Step [430/2000], Content Loss: 28.1289, Style Loss: 28.5518
Step [440/2000], Content Loss: 28.2207, Style Loss: 28.0077
Step [450/2000], Content Loss: 28.3101, Style Loss: 27.4842
Step [460/2000], Content Loss: 28.4016, Style Loss: 26.9804
Step [470/2000], Content Loss: 28.4844, Style Loss: 26.4949
Step [480/2000], Content Loss: 28.5667, Style Loss: 26.0286
Step [490/2000], Content Loss: 28.6440, Style Loss: 25.5799
Step [500/2000], Content Loss: 28.7183, Style Loss: 25.1476
Step [510/2000], Content Loss: 28.7939, Style Loss: 24.7302
Step [520/2000], Content Loss: 28.8708, Style Loss: 24.3261
Step [530/2000], Content Loss: 28.9440, Style Loss: 23.9349
Step [540/2000], Content Loss: 29.0163, Style Loss: 23.5566
Step [550/2000], Content Loss: 29.0864, Style Loss: 23.1890
Step [560/2000], Content Loss: 29.1529, Style Loss: 22.8329
Step [570/2000], Content Loss: 29.2189, Style Loss: 22.4880
Step [580/2000], Content Loss: 29.2833, Style Loss: 22.1529
Step [590/2000], Content Loss: 29.3477, Style Loss: 21.8286
Step [600/2000], Content Loss: 29.4093, Style Loss: 21.5141
Step [610/2000], Content Loss: 29.4694, Style Loss: 21.2083
Step [620/2000], Content Loss: 29.5252, Style Loss: 20.9107
Step [630/2000], Content Loss: 29.5821, Style Loss: 20.6206
Step [640/2000], Content Loss: 29.6378, Style Loss: 20.3381
Step [650/2000], Content Loss: 29.6938, Style Loss: 20.0623
Step [660/2000], Content Loss: 29.7449, Style Loss: 19.7930
Step [670/2000], Content Loss: 29.7975, Style Loss: 19.5310
Step [680/2000], Content Loss: 29.8479, Style Loss: 19.2760
Step [690/2000], Content Loss: 29.8950, Style Loss: 19.0278
Step [700/2000], Content Loss: 29.9427, Style Loss: 18.7856
Step [710/2000], Content Loss: 29.9889, Style Loss: 18.5502
Step [720/2000], Content Loss: 30.0369, Style Loss: 18.3209
Step [730/2000], Content Loss: 30.0841, Style Loss: 18.0967
Step [740/2000], Content Loss: 30.1312, Style Loss: 17.8776
Step [750/2000], Content Loss: 30.1793, Style Loss: 17.6630
Step [760/2000], Content Loss: 30.2209, Style Loss: 17.4535
Step [770/2000], Content Loss: 30.2625, Style Loss: 17.2486
Step [780/2000], Content Loss: 30.3043, Style Loss: 17.0483
Step [790/2000], Content Loss: 30.3472, Style Loss: 16.8526
Step [800/2000], Content Loss: 30.3883, Style Loss: 16.6612
Step [810/2000], Content Loss: 30.4279, Style Loss: 16.4737
Step [820/2000], Content Loss: 30.4663, Style Loss: 16.2899
Step [830/2000], Content Loss: 30.5036, Style Loss: 16.1099
Step [840/2000], Content Loss: 30.5427, Style Loss: 15.9336
Step [850/2000], Content Loss: 30.5801, Style Loss: 15.7608
Step [860/2000], Content Loss: 30.6190, Style Loss: 15.5913
Step [870/2000], Content Loss: 30.6561, Style Loss: 15.4249
Step [880/2000], Content Loss: 30.6927, Style Loss: 15.2619
Step [890/2000], Content Loss: 30.7275, Style Loss: 15.1023
Step [900/2000], Content Loss: 30.7620, Style Loss: 14.9457
Step [910/2000], Content Loss: 30.7954, Style Loss: 14.7917
Step [920/2000], Content Loss: 30.8298, Style Loss: 14.6399
Step [930/2000], Content Loss: 30.8670, Style Loss: 14.4906
Step [940/2000], Content Loss: 30.9016, Style Loss: 14.3440
Step [950/2000], Content Loss: 30.9369, Style Loss: 14.1998
Step [960/2000], Content Loss: 30.9720, Style Loss: 14.0581
Step [970/2000], Content Loss: 31.0021, Style Loss: 13.9193
Step [980/2000], Content Loss: 31.0370, Style Loss: 13.7825
Step [990/2000], Content Loss: 31.0691, Style Loss: 13.6480
Step [1000/2000], Content Loss: 31.0998, Style Loss: 13.5158
Step [1010/2000], Content Loss: 31.1302, Style Loss: 13.3861
Step [1020/2000], Content Loss: 31.1605, Style Loss: 13.2587
Step [1030/2000], Content Loss: 31.1915, Style Loss: 13.1332
Step [1040/2000], Content Loss: 31.2220, Style Loss: 13.0099
Step [1050/2000], Content Loss: 31.2528, Style Loss: 12.8889
Step [1060/2000], Content Loss: 31.2860, Style Loss: 12.7697
Step [1070/2000], Content Loss: 31.3174, Style Loss: 12.6525
Step [1080/2000], Content Loss: 31.3475, Style Loss: 12.5375
Step [1090/2000], Content Loss: 31.3775, Style Loss: 12.4245
Step [1100/2000], Content Loss: 31.4046, Style Loss: 12.3129
Step [1110/2000], Content Loss: 31.4350, Style Loss: 12.2038
Step [1120/2000], Content Loss: 31.4598, Style Loss: 12.0956
Step [1130/2000], Content Loss: 31.4878, Style Loss: 11.9894
Step [1140/2000], Content Loss: 31.5149, Style Loss: 11.8847
Step [1150/2000], Content Loss: 31.5406, Style Loss: 11.7818
Step [1160/2000], Content Loss: 31.5659, Style Loss: 11.6805
Step [1170/2000], Content Loss: 31.5901, Style Loss: 11.5803
Step [1180/2000], Content Loss: 31.6137, Style Loss: 11.4822
Step [1190/2000], Content Loss: 31.6345, Style Loss: 11.3851
Step [1200/2000], Content Loss: 31.6543, Style Loss: 11.2900
Step [1210/2000], Content Loss: 31.6787, Style Loss: 11.1968
Step [1220/2000], Content Loss: 31.7000, Style Loss: 11.1037
Step [1230/2000], Content Loss: 31.7205, Style Loss: 11.0116
Step [1240/2000], Content Loss: 31.7422, Style Loss: 10.9210
Step [1250/2000], Content Loss: 31.7633, Style Loss: 10.8319
Step [1260/2000], Content Loss: 31.7867, Style Loss: 10.7446
Step [1270/2000], Content Loss: 31.8046, Style Loss: 10.6565
Step [1280/2000], Content Loss: 31.8247, Style Loss: 10.5699
Step [1290/2000], Content Loss: 31.8469, Style Loss: 10.4858
Step [1300/2000], Content Loss: 31.8646, Style Loss: 10.4015
Step [1310/2000], Content Loss: 31.8859, Style Loss: 10.3201
Step [1320/2000], Content Loss: 31.9010, Style Loss: 10.2365
Step [1330/2000], Content Loss: 31.9236, Style Loss: 10.1575
Step [1340/2000], Content Loss: 31.9461, Style Loss: 10.0792
Step [1350/2000], Content Loss: 31.9616, Style Loss: 9.9980
Step [1360/2000], Content Loss: 31.9880, Style Loss: 9.9236
Step [1370/2000], Content Loss: 32.0038, Style Loss: 9.8461
Step [1380/2000], Content Loss: 32.0191, Style Loss: 9.7687
Step [1390/2000], Content Loss: 32.0434, Style Loss: 9.6970
Step [1400/2000], Content Loss: 32.0572, Style Loss: 9.6203
Step [1410/2000], Content Loss: 32.0787, Style Loss: 9.5496
Step [1420/2000], Content Loss: 32.0955, Style Loss: 9.4771
Step [1430/2000], Content Loss: 32.1123, Style Loss: 9.4056
Step [1440/2000], Content Loss: 32.1289, Style Loss: 9.3349
Step [1450/2000], Content Loss: 32.1441, Style Loss: 9.2636
Step [1460/2000], Content Loss: 32.1628, Style Loss: 9.1949
Step [1470/2000], Content Loss: 32.1851, Style Loss: 9.1302
Step [1480/2000], Content Loss: 32.1958, Style Loss: 9.0589
Step [1490/2000], Content Loss: 32.2141, Style Loss: 8.9938
Step [1500/2000], Content Loss: 32.2303, Style Loss: 8.9282
Step [1510/2000], Content Loss: 32.2414, Style Loss: 8.8597
Step [1520/2000], Content Loss: 32.2560, Style Loss: 8.7944
Step [1530/2000], Content Loss: 32.2785, Style Loss: 8.7337
Step [1540/2000], Content Loss: 32.2986, Style Loss: 8.6751
Step [1550/2000], Content Loss: 32.2955, Style Loss: 8.6001
Step [1560/2000], Content Loss: 32.3232, Style Loss: 8.5438
Step [1570/2000], Content Loss: 32.3409, Style Loss: 8.4860
Step [1580/2000], Content Loss: 32.3442, Style Loss: 8.4177
Step [1590/2000], Content Loss: 32.3604, Style Loss: 8.3581
Step [1600/2000], Content Loss: 32.3871, Style Loss: 8.3062
Step [1610/2000], Content Loss: 32.3841, Style Loss: 8.2353
Step [1620/2000], Content Loss: 32.4114, Style Loss: 8.1829
Step [1630/2000], Content Loss: 32.4267, Style Loss: 8.1247
Step [1640/2000], Content Loss: 32.4401, Style Loss: 8.0669
Step [1650/2000], Content Loss: 32.4480, Style Loss: 8.0066
Step [1660/2000], Content Loss: 32.4796, Style Loss: 7.9656
Step [1670/2000], Content Loss: 32.4754, Style Loss: 7.8967
Step [1680/2000], Content Loss: 32.4839, Style Loss: 7.8374
Step [1690/2000], Content Loss: 32.5063, Style Loss: 7.7878
Step [1700/2000], Content Loss: 32.5246, Style Loss: 7.7381
Step [1710/2000], Content Loss: 32.5257, Style Loss: 7.6759
Step [1720/2000], Content Loss: 32.5456, Style Loss: 7.6262
Step [1730/2000], Content Loss: 32.5680, Style Loss: 7.5811
Step [1740/2000], Content Loss: 32.5655, Style Loss: 7.5176
Step [1750/2000], Content Loss: 32.5831, Style Loss: 7.4672
Step [1760/2000], Content Loss: 32.6070, Style Loss: 7.4232
Step [1770/2000], Content Loss: 32.6441, Style Loss: 7.4071
Step [1780/2000], Content Loss: 32.6931, Style Loss: 7.4527
Step [1790/2000], Content Loss: 32.7056, Style Loss: 7.4441
Step [1800/2000], Content Loss: 32.6304, Style Loss: 7.2250
Step [1810/2000], Content Loss: 32.6647, Style Loss: 7.1710
Step [1820/2000], Content Loss: 32.6658, Style Loss: 7.1150
Step [1830/2000], Content Loss: 32.6795, Style Loss: 7.0659
Step [1840/2000], Content Loss: 32.6897, Style Loss: 7.0176
Step [1850/2000], Content Loss: 32.7024, Style Loss: 6.9711
Step [1860/2000], Content Loss: 32.7121, Style Loss: 6.9235
Step [1870/2000], Content Loss: 32.7327, Style Loss: 6.8816
Step [1880/2000], Content Loss: 32.7356, Style Loss: 6.8324
Step [1890/2000], Content Loss: 32.7485, Style Loss: 6.7878
Step [1900/2000], Content Loss: 32.7634, Style Loss: 6.7444
Step [1910/2000], Content Loss: 32.7753, Style Loss: 6.6990
Step [1920/2000], Content Loss: 32.7872, Style Loss: 6.6547
Step [1930/2000], Content Loss: 32.8038, Style Loss: 6.6145
Step [1940/2000], Content Loss: 32.8169, Style Loss: 6.5722
Step [1950/2000], Content Loss: 32.8173, Style Loss: 6.5240
Step [1960/2000], Content Loss: 32.8359, Style Loss: 6.4847
Step [1970/2000], Content Loss: 32.8538, Style Loss: 6.4470
Step [1980/2000], Content Loss: 32.8599, Style Loss: 6.4017
Step [1990/2000], Content Loss: 32.8634, Style Loss: 6.3566
denorm = transforms.Normalize((-2.12, -2.04, -1.80), (4.37, 4.46, 4.44))  # inverse of the ImageNet normalization
img = target.clone().squeeze()
img = denorm(img).clamp_(0, 1)
plt.figure()
imshow(img, title='Target Image')
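
Where do those denorm constants come from? transforms.Normalize(mean, std) computes (x - mean) / std, so its inverse is another Normalize with mean' = -mean/std and std' = 1/std. A quick sketch deriving them from the ImageNet statistics used earlier:

import numpy as np

mean = np.array([0.485, 0.456, 0.406])
std = np.array([0.229, 0.224, 0.225])
print(-mean / std)  # approximately [-2.118, -2.036, -1.804]
print(1 / std)      # approximately [4.367, 4.464, 4.444]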

[output image: the stylized target image]

Generative Adversarial Networks
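
A GAN consists of two networks trained against each other: a generator G that maps a random latent vector z to a fake image, and a discriminator D that estimates the probability that its input is real. D is trained to maximize log D(x) + log(1 - D(G(z))), while G is trained to fool D. In the code below both sides are written with binary cross-entropy: the discriminator's loss uses label 1 for real images and 0 for fakes, and the generator's loss asks D to output 1 on generated images (the standard non-saturating generator loss).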

batch_size = 32
transform = transforms.Compose([
    transforms.ToTensor(),
    # MNIST is single-channel, so we normalize one channel
    transforms.Normalize(mean=(0.5,), std=(0.5,))
])

mnist_data = torchvision.datasets.MNIST("./mnist_data", train=True, download=True, transform=transform)
dataloader = torch.utils.data.DataLoader(dataset=mnist_data,
                                         batch_size=batch_size,
                                         shuffle=True)
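
Note the normalization: mapping pixel values from [0, 1] to [-1, 1] matches the Tanh activation at the output of the generator defined below, so real and fake images live in the same value range.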
image_size = 784

hidden_size = 256
# Discriminator
D = nn.Sequential(
    nn.Linear(image_size, hidden_size),
    nn.LeakyReLU(0.2),
    nn.Linear(hidden_size, hidden_size),
    nn.LeakyReLU(0.2),
    nn.Linear(hidden_size, 1),
    nn.Sigmoid()
)

latent_size = 64
# Generator
G = nn.Sequential(
    nn.Linear(latent_size, hidden_size),
    nn.ReLU(),
    nn.Linear(hidden_size, hidden_size),
    nn.ReLU(),
    nn.Linear(hidden_size, image_size),
    nn.Tanh()
)

D = D.to(device)
G = G.to(device)

loss_fn = nn.BCELoss()
d_optimizer = torch.optim.Adam(D.parameters(), lr=0.0002)
g_optimizer = torch.optim.Adam(G.parameters(), lr=0.0002)

Training
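
Two details in the loop below are worth calling out: when training the discriminator, the fakes are passed in as fake_images.detach(), so no gradient flows back into the generator during D's update; and for the generator step a fresh batch of noise is drawn and its loss uses real_labels, i.e. G is pushed to make D output 1 on fakes.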


def reset_grad():
    d_optimizer.zero_grad()
    g_optimizer.zero_grad()

total_step = len(dataloader)
num_epochs = 200
for epoch in range(num_epochs):
    for i, (images, _) in enumerate(dataloader):
        batch_size = images.size(0)
        images = images.reshape(batch_size, image_size).to(device)

        real_labels = torch.ones(batch_size, 1).to(device)
        fake_labels = torch.zeros(batch_size, 1).to(device)

        outputs = D(images)
        d_loss_real = loss_fn(outputs, real_labels)
        real_score = outputs

        # generate fake images
        z = torch.randn(batch_size, latent_size).to(device)
        fake_images = G(z)
        outputs = D(fake_images.detach())
        d_loss_fake = loss_fn(outputs, fake_labels)
        fake_score = outputs

        # optimize the discriminator
        d_loss = d_loss_real + d_loss_fake
        reset_grad()
        d_loss.backward()
        d_optimizer.step()

        # optimize the generator
        z = torch.randn(batch_size, latent_size).to(device)
        fake_images = G(z)
        outputs = D(fake_images)
        g_loss = loss_fn(outputs, real_labels)

        reset_grad()
        g_loss.backward()
        g_optimizer.step()

        if i % 1000 == 0:
            print("Epoch [{}/{}], Step [{}/{}], d_loss: {:.4f}, g_loss: {:.4f}, D(x): {:.2f}, D(G(z)): {:.2f}"
                  .format(epoch, num_epochs, i, total_step, d_loss.item(), g_loss.item(), real_score.mean().item(), fake_score.mean().item()))
Epoch [0/200], Step [0/1875], d_loss: 0.6669, g_loss: 2.9577, D(x): 0.76, D(G(z)): 0.15
Epoch [0/200], Step [1000/1875], d_loss: 0.1716, g_loss: 3.0008, D(x): 0.93, D(G(z)): 0.09
Epoch [1/200], Step [0/1875], d_loss: 0.1716, g_loss: 4.1396, D(x): 0.93, D(G(z)): 0.02
Epoch [1/200], Step [1000/1875], d_loss: 0.0202, g_loss: 5.1296, D(x): 1.00, D(G(z)): 0.02
Epoch [2/200], Step [0/1875], d_loss: 0.2070, g_loss: 3.7713, D(x): 0.93, D(G(z)): 0.08
Epoch [2/200], Step [1000/1875], d_loss: 0.0829, g_loss: 4.9163, D(x): 0.99, D(G(z)): 0.07
Epoch [3/200], Step [0/1875], d_loss: 0.2986, g_loss: 3.6197, D(x): 0.90, D(G(z)): 0.03
Epoch [3/200], Step [1000/1875], d_loss: 0.4204, g_loss: 2.2956, D(x): 0.90, D(G(z)): 0.14
Epoch [4/200], Step [0/1875], d_loss: 0.4453, g_loss: 5.1677, D(x): 0.80, D(G(z)): 0.02
Epoch [4/200], Step [1000/1875], d_loss: 0.1900, g_loss: 2.7722, D(x): 0.93, D(G(z)): 0.10
Epoch [5/200], Step [0/1875], d_loss: 0.3418, g_loss: 2.4469, D(x): 1.00, D(G(z)): 0.21
Epoch [5/200], Step [1000/1875], d_loss: 0.4460, g_loss: 2.4152, D(x): 0.90, D(G(z)): 0.18
Epoch [6/200], Step [0/1875], d_loss: 0.3142, g_loss: 4.0145, D(x): 0.93, D(G(z)): 0.13
Epoch [6/200], Step [1000/1875], d_loss: 0.5893, g_loss: 3.9873, D(x): 0.97, D(G(z)): 0.31
Epoch [7/200], Step [0/1875], d_loss: 0.3118, g_loss: 3.2590, D(x): 0.88, D(G(z)): 0.10
Epoch [7/200], Step [1000/1875], d_loss: 0.5169, g_loss: 2.8562, D(x): 0.84, D(G(z)): 0.20
Epoch [8/200], Step [0/1875], d_loss: 0.1886, g_loss: 3.0765, D(x): 0.93, D(G(z)): 0.05
Epoch [8/200], Step [1000/1875], d_loss: 0.5987, g_loss: 3.0972, D(x): 0.86, D(G(z)): 0.17
Epoch [9/200], Step [0/1875], d_loss: 0.7312, g_loss: 2.5704, D(x): 0.93, D(G(z)): 0.30
Epoch [9/200], Step [1000/1875], d_loss: 0.2202, g_loss: 3.1345, D(x): 0.94, D(G(z)): 0.11
Epoch [10/200], Step [0/1875], d_loss: 0.5448, g_loss: 3.2835, D(x): 0.81, D(G(z)): 0.11
Epoch [10/200], Step [1000/1875], d_loss: 0.4599, g_loss: 2.8296, D(x): 0.81, D(G(z)): 0.09
Epoch [11/200], Step [0/1875], d_loss: 0.3990, g_loss: 3.9110, D(x): 0.86, D(G(z)): 0.11
Epoch [11/200], Step [1000/1875], d_loss: 0.4137, g_loss: 3.2849, D(x): 0.88, D(G(z)): 0.17
Epoch [12/200], Step [0/1875], d_loss: 0.6989, g_loss: 2.1561, D(x): 0.80, D(G(z)): 0.24
Epoch [12/200], Step [1000/1875], d_loss: 0.7982, g_loss: 2.6202, D(x): 0.75, D(G(z)): 0.27
Epoch [13/200], Step [0/1875], d_loss: 0.7775, g_loss: 2.6229, D(x): 0.70, D(G(z)): 0.09
Epoch [13/200], Step [1000/1875], d_loss: 0.7904, g_loss: 2.3377, D(x): 0.69, D(G(z)): 0.06
Epoch [14/200], Step [0/1875], d_loss: 0.5520, g_loss: 3.6026, D(x): 0.87, D(G(z)): 0.23
Epoch [14/200], Step [1000/1875], d_loss: 0.4877, g_loss: 1.8566, D(x): 0.81, D(G(z)): 0.11
Epoch [15/200], Step [0/1875], d_loss: 0.6178, g_loss: 2.8264, D(x): 0.73, D(G(z)): 0.08
Epoch [15/200], Step [1000/1875], d_loss: 0.5656, g_loss: 2.0427, D(x): 0.85, D(G(z)): 0.23
Epoch [16/200], Step [0/1875], d_loss: 0.7704, g_loss: 1.8280, D(x): 0.82, D(G(z)): 0.28
Epoch [16/200], Step [1000/1875], d_loss: 0.4717, g_loss: 2.3330, D(x): 0.87, D(G(z)): 0.23
Epoch [17/200], Step [0/1875], d_loss: 0.6158, g_loss: 2.3867, D(x): 0.80, D(G(z)): 0.21
Epoch [17/200], Step [1000/1875], d_loss: 0.5036, g_loss: 2.1572, D(x): 0.86, D(G(z)): 0.22
Epoch [18/200], Step [0/1875], d_loss: 0.2080, g_loss: 3.1542, D(x): 0.97, D(G(z)): 0.13
Epoch [18/200], Step [1000/1875], d_loss: 0.4262, g_loss: 3.2852, D(x): 0.85, D(G(z)): 0.12
Epoch [19/200], Step [0/1875], d_loss: 1.1834, g_loss: 1.7200, D(x): 0.82, D(G(z)): 0.44
Epoch [19/200], Step [1000/1875], d_loss: 0.7412, g_loss: 3.3823, D(x): 0.70, D(G(z)): 0.14
Epoch [20/200], Step [0/1875], d_loss: 0.8160, g_loss: 2.5552, D(x): 0.78, D(G(z)): 0.28
Epoch [20/200], Step [1000/1875], d_loss: 0.8000, g_loss: 1.9645, D(x): 0.82, D(G(z)): 0.31
Epoch [21/200], Step [0/1875], d_loss: 0.8578, g_loss: 2.7063, D(x): 0.70, D(G(z)): 0.24
Epoch [21/200], Step [1000/1875], d_loss: 0.4567, g_loss: 1.8023, D(x): 0.83, D(G(z)): 0.18
Epoch [22/200], Step [0/1875], d_loss: 0.6396, g_loss: 1.9526, D(x): 0.76, D(G(z)): 0.20
Epoch [22/200], Step [1000/1875], d_loss: 0.4177, g_loss: 2.4358, D(x): 0.89, D(G(z)): 0.18
Epoch [23/200], Step [0/1875], d_loss: 0.7560, g_loss: 2.3783, D(x): 0.83, D(G(z)): 0.36
Epoch [23/200], Step [1000/1875], d_loss: 0.8418, g_loss: 1.2812, D(x): 0.72, D(G(z)): 0.22
Epoch [24/200], Step [0/1875], d_loss: 0.8319, g_loss: 2.1962, D(x): 0.66, D(G(z)): 0.20
Epoch [24/200], Step [1000/1875], d_loss: 0.8614, g_loss: 2.3836, D(x): 0.67, D(G(z)): 0.20
Epoch [25/200], Step [0/1875], d_loss: 0.8590, g_loss: 1.5315, D(x): 0.78, D(G(z)): 0.36
Epoch [25/200], Step [1000/1875], d_loss: 0.9564, g_loss: 1.7998, D(x): 0.77, D(G(z)): 0.38
Epoch [26/200], Step [0/1875], d_loss: 0.8937, g_loss: 1.3713, D(x): 0.74, D(G(z)): 0.31
Epoch [26/200], Step [1000/1875], d_loss: 0.9061, g_loss: 2.4561, D(x): 0.74, D(G(z)): 0.27
Epoch [27/200], Step [0/1875], d_loss: 0.6779, g_loss: 2.1518, D(x): 0.76, D(G(z)): 0.23
Epoch [27/200], Step [1000/1875], d_loss: 1.0955, g_loss: 1.9235, D(x): 0.70, D(G(z)): 0.31
Epoch [28/200], Step [0/1875], d_loss: 0.7943, g_loss: 1.5614, D(x): 0.73, D(G(z)): 0.24
Epoch [28/200], Step [1000/1875], d_loss: 0.8096, g_loss: 1.8443, D(x): 0.86, D(G(z)): 0.40
Epoch [29/200], Step [0/1875], d_loss: 0.6123, g_loss: 1.8900, D(x): 0.80, D(G(z)): 0.23
Epoch [29/200], Step [1000/1875], d_loss: 0.9214, g_loss: 1.5088, D(x): 0.79, D(G(z)): 0.38
Epoch [30/200], Step [0/1875], d_loss: 1.1502, g_loss: 1.2392, D(x): 0.63, D(G(z)): 0.31
Epoch [30/200], Step [1000/1875], d_loss: 0.7820, g_loss: 1.2615, D(x): 0.81, D(G(z)): 0.35
Epoch [31/200], Step [0/1875], d_loss: 0.9985, g_loss: 1.9074, D(x): 0.63, D(G(z)): 0.23
Epoch [31/200], Step [1000/1875], d_loss: 0.7422, g_loss: 1.5258, D(x): 0.72, D(G(z)): 0.26
Epoch [32/200], Step [0/1875], d_loss: 0.9283, g_loss: 2.1753, D(x): 0.60, D(G(z)): 0.20
Epoch [32/200], Step [1000/1875], d_loss: 0.6156, g_loss: 1.8300, D(x): 0.88, D(G(z)): 0.34
Epoch [33/200], Step [0/1875], d_loss: 0.7572, g_loss: 2.5281, D(x): 0.69, D(G(z)): 0.20
Epoch [33/200], Step [1000/1875], d_loss: 1.2556, g_loss: 1.6872, D(x): 0.58, D(G(z)): 0.31
Epoch [34/200], Step [0/1875], d_loss: 0.9278, g_loss: 1.6144, D(x): 0.77, D(G(z)): 0.37
Epoch [34/200], Step [1000/1875], d_loss: 1.0190, g_loss: 1.9249, D(x): 0.65, D(G(z)): 0.31
Epoch [35/200], Step [0/1875], d_loss: 1.1411, g_loss: 1.3005, D(x): 0.79, D(G(z)): 0.47
Epoch [35/200], Step [1000/1875], d_loss: 0.9863, g_loss: 0.9696, D(x): 0.81, D(G(z)): 0.45
Epoch [36/200], Step [0/1875], d_loss: 0.6408, g_loss: 1.7086, D(x): 0.77, D(G(z)): 0.24
Epoch [36/200], Step [1000/1875], d_loss: 0.8755, g_loss: 1.4808, D(x): 0.71, D(G(z)): 0.31
Epoch [37/200], Step [0/1875], d_loss: 0.8984, g_loss: 1.3038, D(x): 0.77, D(G(z)): 0.37
Epoch [37/200], Step [1000/1875], d_loss: 0.8318, g_loss: 1.4391, D(x): 0.73, D(G(z)): 0.29
Epoch [38/200], Step [0/1875], d_loss: 0.6922, g_loss: 1.8307, D(x): 0.77, D(G(z)): 0.27
Epoch [38/200], Step [1000/1875], d_loss: 1.1070, g_loss: 1.1424, D(x): 0.71, D(G(z)): 0.45
Epoch [39/200], Step [0/1875], d_loss: 0.8160, g_loss: 1.7084, D(x): 0.79, D(G(z)): 0.31
Epoch [39/200], Step [1000/1875], d_loss: 0.7833, g_loss: 1.5914, D(x): 0.69, D(G(z)): 0.16
Epoch [40/200], Step [0/1875], d_loss: 1.1307, g_loss: 1.1723, D(x): 0.72, D(G(z)): 0.42
Epoch [40/200], Step [1000/1875], d_loss: 0.9260, g_loss: 1.5115, D(x): 0.58, D(G(z)): 0.19
Epoch [41/200], Step [0/1875], d_loss: 0.8279, g_loss: 2.0445, D(x): 0.71, D(G(z)): 0.26
Epoch [41/200], Step [1000/1875], d_loss: 1.0122, g_loss: 1.4877, D(x): 0.68, D(G(z)): 0.34
Epoch [42/200], Step [0/1875], d_loss: 1.0094, g_loss: 1.5560, D(x): 0.67, D(G(z)): 0.30
Epoch [42/200], Step [1000/1875], d_loss: 1.1574, g_loss: 1.0871, D(x): 0.80, D(G(z)): 0.48
Epoch [43/200], Step [0/1875], d_loss: 0.7671, g_loss: 1.4075, D(x): 0.72, D(G(z)): 0.23
Epoch [43/200], Step [1000/1875], d_loss: 0.8994, g_loss: 1.4649, D(x): 0.69, D(G(z)): 0.28
Epoch [44/200], Step [0/1875], d_loss: 0.8590, g_loss: 1.2829, D(x): 0.75, D(G(z)): 0.35
Epoch [44/200], Step [1000/1875], d_loss: 0.8026, g_loss: 2.1658, D(x): 0.64, D(G(z)): 0.18
Epoch [45/200], Step [0/1875], d_loss: 1.1981, g_loss: 1.5492, D(x): 0.65, D(G(z)): 0.37
Epoch [45/200], Step [1000/1875], d_loss: 1.0184, g_loss: 1.2799, D(x): 0.68, D(G(z)): 0.37
Epoch [46/200], Step [0/1875], d_loss: 0.7981, g_loss: 2.0579, D(x): 0.71, D(G(z)): 0.26
Epoch [46/200], Step [1000/1875], d_loss: 1.1051, g_loss: 1.2950, D(x): 0.63, D(G(z)): 0.28
Epoch [47/200], Step [0/1875], d_loss: 0.9363, g_loss: 1.2712, D(x): 0.64, D(G(z)): 0.26
Epoch [47/200], Step [1000/1875], d_loss: 0.7284, g_loss: 1.2780, D(x): 0.82, D(G(z)): 0.36
Epoch [48/200], Step [0/1875], d_loss: 0.9353, g_loss: 1.6880, D(x): 0.76, D(G(z)): 0.41
Epoch [48/200], Step [1000/1875], d_loss: 0.9996, g_loss: 1.7311, D(x): 0.70, D(G(z)): 0.32
Epoch [49/200], Step [0/1875], d_loss: 0.9926, g_loss: 1.4112, D(x): 0.78, D(G(z)): 0.42
Epoch [49/200], Step [1000/1875], d_loss: 0.8023, g_loss: 1.6557, D(x): 0.65, D(G(z)): 0.21
Epoch [50/200], Step [0/1875], d_loss: 0.8718, g_loss: 1.9058, D(x): 0.63, D(G(z)): 0.20
Epoch [50/200], Step [1000/1875], d_loss: 0.9961, g_loss: 1.5768, D(x): 0.62, D(G(z)): 0.28
Epoch [51/200], Step [0/1875], d_loss: 0.9317, g_loss: 1.3332, D(x): 0.70, D(G(z)): 0.33
Epoch [51/200], Step [1000/1875], d_loss: 0.9427, g_loss: 1.1736, D(x): 0.68, D(G(z)): 0.32
Epoch [52/200], Step [0/1875], d_loss: 0.7741, g_loss: 1.6549, D(x): 0.74, D(G(z)): 0.29
Epoch [52/200], Step [1000/1875], d_loss: 1.2812, g_loss: 1.1068, D(x): 0.71, D(G(z)): 0.49
Epoch [53/200], Step [0/1875], d_loss: 0.8245, g_loss: 1.5040, D(x): 0.73, D(G(z)): 0.28
Epoch [53/200], Step [1000/1875], d_loss: 1.0251, g_loss: 1.2684, D(x): 0.80, D(G(z)): 0.44
Epoch [54/200], Step [0/1875], d_loss: 1.1557, g_loss: 1.8746, D(x): 0.67, D(G(z)): 0.35
Epoch [54/200], Step [1000/1875], d_loss: 1.1738, g_loss: 1.6428, D(x): 0.57, D(G(z)): 0.33
Epoch [55/200], Step [0/1875], d_loss: 1.0400, g_loss: 1.3476, D(x): 0.55, D(G(z)): 0.21
Epoch [55/200], Step [1000/1875], d_loss: 1.0220, g_loss: 1.4821, D(x): 0.59, D(G(z)): 0.24
Epoch [56/200], Step [0/1875], d_loss: 0.7882, g_loss: 1.4944, D(x): 0.66, D(G(z)): 0.20
Epoch [56/200], Step [1000/1875], d_loss: 0.8876, g_loss: 1.5311, D(x): 0.73, D(G(z)): 0.34
Epoch [57/200], Step [0/1875], d_loss: 1.0530, g_loss: 1.7741, D(x): 0.69, D(G(z)): 0.36
Epoch [57/200], Step [1000/1875], d_loss: 1.1232, g_loss: 1.5487, D(x): 0.62, D(G(z)): 0.33
Epoch [58/200], Step [0/1875], d_loss: 1.0350, g_loss: 1.3535, D(x): 0.73, D(G(z)): 0.43
Epoch [58/200], Step [1000/1875], d_loss: 0.7528, g_loss: 1.4546, D(x): 0.74, D(G(z)): 0.28
Epoch [59/200], Step [0/1875], d_loss: 0.9243, g_loss: 1.3529, D(x): 0.71, D(G(z)): 0.31
Epoch [59/200], Step [1000/1875], d_loss: 1.0429, g_loss: 1.6492, D(x): 0.64, D(G(z)): 0.36
Epoch [60/200], Step [0/1875], d_loss: 0.9420, g_loss: 1.4876, D(x): 0.68, D(G(z)): 0.31
Epoch [60/200], Step [1000/1875], d_loss: 1.0196, g_loss: 1.6513, D(x): 0.67, D(G(z)): 0.34
Epoch [61/200], Step [0/1875], d_loss: 1.0662, g_loss: 1.4362, D(x): 0.64, D(G(z)): 0.29
Epoch [61/200], Step [1000/1875], d_loss: 1.1993, g_loss: 1.2304, D(x): 0.55, D(G(z)): 0.34
Epoch [62/200], Step [0/1875], d_loss: 1.1418, g_loss: 1.6582, D(x): 0.57, D(G(z)): 0.27
Epoch [62/200], Step [1000/1875], d_loss: 1.1739, g_loss: 1.0282, D(x): 0.72, D(G(z)): 0.50
Epoch [63/200], Step [0/1875], d_loss: 0.9645, g_loss: 1.2030, D(x): 0.67, D(G(z)): 0.32
Epoch [63/200], Step [1000/1875], d_loss: 1.0324, g_loss: 1.8831, D(x): 0.63, D(G(z)): 0.30
Epoch [64/200], Step [0/1875], d_loss: 1.2073, g_loss: 1.2013, D(x): 0.60, D(G(z)): 0.37
Epoch [64/200], Step [1000/1875], d_loss: 1.3382, g_loss: 1.4971, D(x): 0.69, D(G(z)): 0.47
Epoch [65/200], Step [0/1875], d_loss: 0.7616, g_loss: 1.4244, D(x): 0.79, D(G(z)): 0.33
Epoch [65/200], Step [1000/1875], d_loss: 0.9834, g_loss: 1.9160, D(x): 0.60, D(G(z)): 0.24
Epoch [66/200], Step [0/1875], d_loss: 0.9860, g_loss: 1.2135, D(x): 0.71, D(G(z)): 0.36
Epoch [66/200], Step [1000/1875], d_loss: 1.1599, g_loss: 1.9320, D(x): 0.56, D(G(z)): 0.24
Epoch [67/200], Step [0/1875], d_loss: 0.9280, g_loss: 1.6222, D(x): 0.62, D(G(z)): 0.25
Epoch [67/200], Step [1000/1875], d_loss: 0.8609, g_loss: 1.2151, D(x): 0.72, D(G(z)): 0.34
Epoch [68/200], Step [0/1875], d_loss: 1.1169, g_loss: 1.2863, D(x): 0.64, D(G(z)): 0.35
Epoch [68/200], Step [1000/1875], d_loss: 1.3884, g_loss: 1.1648, D(x): 0.80, D(G(z)): 0.59
Epoch [69/200], Step [0/1875], d_loss: 0.7709, g_loss: 1.5080, D(x): 0.72, D(G(z)): 0.29
Epoch [69/200], Step [1000/1875], d_loss: 0.9492, g_loss: 1.4181, D(x): 0.67, D(G(z)): 0.29
Epoch [70/200], Step [0/1875], d_loss: 0.8738, g_loss: 1.2650, D(x): 0.74, D(G(z)): 0.36
Epoch [70/200], Step [1000/1875], d_loss: 1.0756, g_loss: 1.4710, D(x): 0.74, D(G(z)): 0.41
Epoch [71/200], Step [0/1875], d_loss: 0.8898, g_loss: 1.4363, D(x): 0.69, D(G(z)): 0.30
Epoch [71/200], Step [1000/1875], d_loss: 0.9169, g_loss: 1.2323, D(x): 0.63, D(G(z)): 0.25
Epoch [72/200], Step [0/1875], d_loss: 0.9560, g_loss: 1.2931, D(x): 0.63, D(G(z)): 0.29
Epoch [72/200], Step [1000/1875], d_loss: 0.9121, g_loss: 1.6194, D(x): 0.69, D(G(z)): 0.30
Epoch [73/200], Step [0/1875], d_loss: 0.9210, g_loss: 1.6881, D(x): 0.64, D(G(z)): 0.29
Epoch [73/200], Step [1000/1875], d_loss: 0.9212, g_loss: 1.6392, D(x): 0.72, D(G(z)): 0.36
Epoch [74/200], Step [0/1875], d_loss: 1.2269, g_loss: 1.4554, D(x): 0.57, D(G(z)): 0.35
Epoch [74/200], Step [1000/1875], d_loss: 1.0380, g_loss: 1.3137, D(x): 0.81, D(G(z)): 0.46
Epoch [75/200], Step [0/1875], d_loss: 1.0824, g_loss: 2.1083, D(x): 0.60, D(G(z)): 0.28
Epoch [75/200], Step [1000/1875], d_loss: 1.0364, g_loss: 1.2388, D(x): 0.61, D(G(z)): 0.32
Epoch [76/200], Step [0/1875], d_loss: 1.0572, g_loss: 1.7266, D(x): 0.58, D(G(z)): 0.24
Epoch [76/200], Step [1000/1875], d_loss: 1.1760, g_loss: 1.3603, D(x): 0.66, D(G(z)): 0.39
Epoch [77/200], Step [0/1875], d_loss: 0.7916, g_loss: 1.2981, D(x): 0.71, D(G(z)): 0.27
Epoch [77/200], Step [1000/1875], d_loss: 0.9169, g_loss: 1.3591, D(x): 0.68, D(G(z)): 0.32
Epoch [78/200], Step [0/1875], d_loss: 0.9650, g_loss: 1.2724, D(x): 0.70, D(G(z)): 0.39
Epoch [78/200], Step [1000/1875], d_loss: 1.0706, g_loss: 1.5743, D(x): 0.72, D(G(z)): 0.41
Epoch [79/200], Step [0/1875], d_loss: 1.0080, g_loss: 1.4655, D(x): 0.61, D(G(z)): 0.30
Epoch [79/200], Step [1000/1875], d_loss: 0.9786, g_loss: 1.2689, D(x): 0.67, D(G(z)): 0.36
Epoch [80/200], Step [0/1875], d_loss: 0.9673, g_loss: 1.3955, D(x): 0.80, D(G(z)): 0.44
Epoch [80/200], Step [1000/1875], d_loss: 1.0951, g_loss: 1.3826, D(x): 0.71, D(G(z)): 0.43
Epoch [81/200], Step [0/1875], d_loss: 0.9750, g_loss: 1.7231, D(x): 0.55, D(G(z)): 0.21
Epoch [81/200], Step [1000/1875], d_loss: 0.8631, g_loss: 1.3905, D(x): 0.69, D(G(z)): 0.31
Epoch [82/200], Step [0/1875], d_loss: 1.2233, g_loss: 1.3238, D(x): 0.61, D(G(z)): 0.39
Epoch [82/200], Step [1000/1875], d_loss: 1.1894, g_loss: 1.3720, D(x): 0.70, D(G(z)): 0.40
Epoch [83/200], Step [0/1875], d_loss: 1.0209, g_loss: 1.5222, D(x): 0.63, D(G(z)): 0.26
Epoch [83/200], Step [1000/1875], d_loss: 0.7264, g_loss: 1.6697, D(x): 0.73, D(G(z)): 0.28
Epoch [84/200], Step [0/1875], d_loss: 0.8771, g_loss: 1.2173, D(x): 0.76, D(G(z)): 0.36
Epoch [84/200], Step [1000/1875], d_loss: 0.9297, g_loss: 1.4891, D(x): 0.72, D(G(z)): 0.34
Epoch [85/200], Step [0/1875], d_loss: 0.9688, g_loss: 1.7294, D(x): 0.63, D(G(z)): 0.29
Epoch [85/200], Step [1000/1875], d_loss: 0.8822, g_loss: 1.5354, D(x): 0.70, D(G(z)): 0.30
Epoch [86/200], Step [0/1875], d_loss: 1.1903, g_loss: 1.2292, D(x): 0.71, D(G(z)): 0.45
Epoch [86/200], Step [1000/1875], d_loss: 1.0713, g_loss: 1.3514, D(x): 0.67, D(G(z)): 0.39
Epoch [87/200], Step [0/1875], d_loss: 0.9523, g_loss: 1.4799, D(x): 0.63, D(G(z)): 0.26
Epoch [87/200], Step [1000/1875], d_loss: 1.1543, g_loss: 1.3191, D(x): 0.63, D(G(z)): 0.35
Epoch [88/200], Step [0/1875], d_loss: 1.0270, g_loss: 1.3444, D(x): 0.63, D(G(z)): 0.33
Epoch [88/200], Step [1000/1875], d_loss: 0.9212, g_loss: 1.6030, D(x): 0.60, D(G(z)): 0.23
Epoch [89/200], Step [0/1875], d_loss: 1.1040, g_loss: 1.2642, D(x): 0.64, D(G(z)): 0.34
Epoch [89/200], Step [1000/1875], d_loss: 0.8394, g_loss: 1.4969, D(x): 0.75, D(G(z)): 0.34
Epoch [90/200], Step [0/1875], d_loss: 0.9523, g_loss: 0.9641, D(x): 0.79, D(G(z)): 0.40
Epoch [90/200], Step [1000/1875], d_loss: 0.7576, g_loss: 1.0150, D(x): 0.78, D(G(z)): 0.33
Epoch [91/200], Step [0/1875], d_loss: 1.2105, g_loss: 0.9780, D(x): 0.66, D(G(z)): 0.41
Epoch [91/200], Step [1000/1875], d_loss: 1.0656, g_loss: 1.5340, D(x): 0.60, D(G(z)): 0.32
Epoch [92/200], Step [0/1875], d_loss: 0.9305, g_loss: 1.5715, D(x): 0.64, D(G(z)): 0.28
Epoch [92/200], Step [1000/1875], d_loss: 0.8817, g_loss: 1.5210, D(x): 0.71, D(G(z)): 0.31
Epoch [93/200], Step [0/1875], d_loss: 0.8735, g_loss: 1.8431, D(x): 0.62, D(G(z)): 0.23
Epoch [93/200], Step [1000/1875], d_loss: 1.2207, g_loss: 1.4299, D(x): 0.61, D(G(z)): 0.36
Epoch [94/200], Step [0/1875], d_loss: 1.1631, g_loss: 1.6790, D(x): 0.53, D(G(z)): 0.25
Epoch [94/200], Step [1000/1875], d_loss: 1.0503, g_loss: 1.3590, D(x): 0.67, D(G(z)): 0.37
Epoch [95/200], Step [0/1875], d_loss: 0.9073, g_loss: 1.3901, D(x): 0.65, D(G(z)): 0.29
Epoch [95/200], Step [1000/1875], d_loss: 0.9264, g_loss: 1.4881, D(x): 0.70, D(G(z)): 0.36
Epoch [96/200], Step [0/1875], d_loss: 0.8375, g_loss: 1.6237, D(x): 0.68, D(G(z)): 0.28
Epoch [96/200], Step [1000/1875], d_loss: 0.8759, g_loss: 1.6055, D(x): 0.70, D(G(z)): 0.32
Epoch [97/200], Step [0/1875], d_loss: 0.9862, g_loss: 1.2774, D(x): 0.73, D(G(z)): 0.42
Epoch [97/200], Step [1000/1875], d_loss: 0.8995, g_loss: 1.3931, D(x): 0.64, D(G(z)): 0.29
Epoch [98/200], Step [0/1875], d_loss: 1.1893, g_loss: 1.0463, D(x): 0.76, D(G(z)): 0.46
Epoch [98/200], Step [1000/1875], d_loss: 1.0180, g_loss: 1.0250, D(x): 0.58, D(G(z)): 0.26
Epoch [99/200], Step [0/1875], d_loss: 0.7713, g_loss: 1.3374, D(x): 0.70, D(G(z)): 0.26
Epoch [99/200], Step [1000/1875], d_loss: 0.9064, g_loss: 1.0758, D(x): 0.74, D(G(z)): 0.37
Epoch [100/200], Step [0/1875], d_loss: 1.0002, g_loss: 1.2143, D(x): 0.64, D(G(z)): 0.30
Epoch [100/200], Step [1000/1875], d_loss: 1.0911, g_loss: 1.3313, D(x): 0.70, D(G(z)): 0.41
Epoch [101/200], Step [0/1875], d_loss: 0.8495, g_loss: 1.9575, D(x): 0.62, D(G(z)): 0.19
Epoch [101/200], Step [1000/1875], d_loss: 0.8246, g_loss: 1.1735, D(x): 0.72, D(G(z)): 0.33
Epoch [102/200], Step [0/1875], d_loss: 0.8016, g_loss: 1.5931, D(x): 0.68, D(G(z)): 0.23
Epoch [102/200], Step [1000/1875], d_loss: 0.7966, g_loss: 1.5136, D(x): 0.65, D(G(z)): 0.22
Epoch [103/200], Step [0/1875], d_loss: 0.8603, g_loss: 1.2868, D(x): 0.80, D(G(z)): 0.39
Epoch [103/200], Step [1000/1875], d_loss: 0.9518, g_loss: 1.7202, D(x): 0.74, D(G(z)): 0.37
Epoch [104/200], Step [0/1875], d_loss: 0.7930, g_loss: 1.7609, D(x): 0.73, D(G(z)): 0.30
Epoch [104/200], Step [1000/1875], d_loss: 1.2606, g_loss: 1.1577, D(x): 0.66, D(G(z)): 0.44
Epoch [105/200], Step [0/1875], d_loss: 1.0098, g_loss: 1.5430, D(x): 0.65, D(G(z)): 0.34
Epoch [105/200], Step [1000/1875], d_loss: 0.9373, g_loss: 0.9949, D(x): 0.76, D(G(z)): 0.39
Epoch [106/200], Step [0/1875], d_loss: 0.9693, g_loss: 1.5791, D(x): 0.68, D(G(z)): 0.34
Epoch [106/200], Step [1000/1875], d_loss: 0.9154, g_loss: 1.4726, D(x): 0.73, D(G(z)): 0.31
Epoch [107/200], Step [0/1875], d_loss: 0.9514, g_loss: 1.7878, D(x): 0.60, D(G(z)): 0.22
Epoch [107/200], Step [1000/1875], d_loss: 1.0044, g_loss: 1.4046, D(x): 0.63, D(G(z)): 0.30
Epoch [108/200], Step [0/1875], d_loss: 0.8615, g_loss: 1.6039, D(x): 0.63, D(G(z)): 0.23
Epoch [108/200], Step [1000/1875], d_loss: 0.8843, g_loss: 1.9490, D(x): 0.62, D(G(z)): 0.25
Epoch [109/200], Step [0/1875], d_loss: 1.0323, g_loss: 1.4124, D(x): 0.63, D(G(z)): 0.32
Epoch [109/200], Step [1000/1875], d_loss: 0.8610, g_loss: 1.2935, D(x): 0.75, D(G(z)): 0.34
Epoch [110/200], Step [0/1875], d_loss: 1.1965, g_loss: 1.6509, D(x): 0.54, D(G(z)): 0.31
Epoch [110/200], Step [1000/1875], d_loss: 0.9098, g_loss: 1.0422, D(x): 0.69, D(G(z)): 0.27
Epoch [111/200], Step [0/1875], d_loss: 1.1742, g_loss: 1.7862, D(x): 0.60, D(G(z)): 0.34
Epoch [111/200], Step [1000/1875], d_loss: 1.0664, g_loss: 1.7042, D(x): 0.57, D(G(z)): 0.27
Epoch [112/200], Step [0/1875], d_loss: 0.9700, g_loss: 1.7371, D(x): 0.68, D(G(z)): 0.30
Epoch [112/200], Step [1000/1875], d_loss: 1.0423, g_loss: 1.7016, D(x): 0.60, D(G(z)): 0.26
Epoch [113/200], Step [0/1875], d_loss: 1.1020, g_loss: 1.0794, D(x): 0.70, D(G(z)): 0.44
Epoch [113/200], Step [1000/1875], d_loss: 1.1647, g_loss: 2.0496, D(x): 0.54, D(G(z)): 0.26
Epoch [114/200], Step [0/1875], d_loss: 1.1799, g_loss: 1.5188, D(x): 0.64, D(G(z)): 0.39
Epoch [114/200], Step [1000/1875], d_loss: 1.0539, g_loss: 1.3321, D(x): 0.72, D(G(z)): 0.40
Epoch [115/200], Step [0/1875], d_loss: 0.9181, g_loss: 1.3867, D(x): 0.71, D(G(z)): 0.35
Epoch [115/200], Step [1000/1875], d_loss: 1.0679, g_loss: 1.9318, D(x): 0.59, D(G(z)): 0.26
Epoch [116/200], Step [0/1875], d_loss: 1.0790, g_loss: 1.1137, D(x): 0.64, D(G(z)): 0.36
Epoch [116/200], Step [1000/1875], d_loss: 1.2793, g_loss: 1.0888, D(x): 0.73, D(G(z)): 0.48
Epoch [117/200], Step [0/1875], d_loss: 0.9659, g_loss: 1.6854, D(x): 0.63, D(G(z)): 0.26
Epoch [117/200], Step [1000/1875], d_loss: 1.0517, g_loss: 1.1859, D(x): 0.68, D(G(z)): 0.38
Epoch [118/200], Step [0/1875], d_loss: 1.0606, g_loss: 1.4192, D(x): 0.67, D(G(z)): 0.29
Epoch [118/200], Step [1000/1875], d_loss: 1.0837, g_loss: 1.5058, D(x): 0.61, D(G(z)): 0.32
Epoch [119/200], Step [0/1875], d_loss: 0.9450, g_loss: 1.2550, D(x): 0.71, D(G(z)): 0.35
Epoch [119/200], Step [1000/1875], d_loss: 1.1078, g_loss: 1.7936, D(x): 0.55, D(G(z)): 0.25
Epoch [120/200], Step [0/1875], d_loss: 0.9814, g_loss: 1.1776, D(x): 0.69, D(G(z)): 0.35
Epoch [120/200], Step [1000/1875], d_loss: 1.0611, g_loss: 1.3892, D(x): 0.59, D(G(z)): 0.31
Epoch [121/200], Step [0/1875], d_loss: 0.9461, g_loss: 1.2199, D(x): 0.70, D(G(z)): 0.36
Epoch [121/200], Step [1000/1875], d_loss: 0.9500, g_loss: 1.2922, D(x): 0.62, D(G(z)): 0.28
Epoch [122/200], Step [0/1875], d_loss: 0.8209, g_loss: 1.4023, D(x): 0.76, D(G(z)): 0.32
Epoch [122/200], Step [1000/1875], d_loss: 1.0864, g_loss: 1.0152, D(x): 0.59, D(G(z)): 0.32
Epoch [123/200], Step [0/1875], d_loss: 1.1689, g_loss: 1.4938, D(x): 0.59, D(G(z)): 0.27
Epoch [123/200], Step [1000/1875], d_loss: 1.0686, g_loss: 1.1028, D(x): 0.64, D(G(z)): 0.33
Epoch [124/200], Step [0/1875], d_loss: 0.9185, g_loss: 1.1483, D(x): 0.72, D(G(z)): 0.33
Epoch [124/200], Step [1000/1875], d_loss: 1.0521, g_loss: 1.0809, D(x): 0.64, D(G(z)): 0.30
Epoch [125/200], Step [0/1875], d_loss: 1.0460, g_loss: 1.7116, D(x): 0.63, D(G(z)): 0.32
Epoch [125/200], Step [1000/1875], d_loss: 1.2099, g_loss: 1.4824, D(x): 0.64, D(G(z)): 0.35
Epoch [126/200], Step [0/1875], d_loss: 1.0053, g_loss: 1.1960, D(x): 0.69, D(G(z)): 0.36
Epoch [126/200], Step [1000/1875], d_loss: 0.9684, g_loss: 1.1075, D(x): 0.66, D(G(z)): 0.34
Epoch [127/200], Step [0/1875], d_loss: 0.7114, g_loss: 1.2725, D(x): 0.76, D(G(z)): 0.30
Epoch [127/200], Step [1000/1875], d_loss: 0.8682, g_loss: 1.3727, D(x): 0.63, D(G(z)): 0.26
Epoch [128/200], Step [0/1875], d_loss: 0.9651, g_loss: 1.1287, D(x): 0.74, D(G(z)): 0.39
Epoch [128/200], Step [1000/1875], d_loss: 0.7600, g_loss: 1.4872, D(x): 0.73, D(G(z)): 0.30
Epoch [129/200], Step [0/1875], d_loss: 1.0353, g_loss: 1.1982, D(x): 0.73, D(G(z)): 0.38
Epoch [129/200], Step [1000/1875], d_loss: 0.9312, g_loss: 1.6565, D(x): 0.67, D(G(z)): 0.30
Epoch [130/200], Step [0/1875], d_loss: 0.7257, g_loss: 1.1873, D(x): 0.69, D(G(z)): 0.22
Epoch [130/200], Step [1000/1875], d_loss: 0.8490, g_loss: 1.5466, D(x): 0.65, D(G(z)): 0.25
Epoch [131/200], Step [0/1875], d_loss: 0.8980, g_loss: 1.5924, D(x): 0.68, D(G(z)): 0.28
Epoch [131/200], Step [1000/1875], d_loss: 0.9562, g_loss: 1.5058, D(x): 0.72, D(G(z)): 0.36
Epoch [132/200], Step [0/1875], d_loss: 1.0407, g_loss: 1.7313, D(x): 0.59, D(G(z)): 0.28
Epoch [132/200], Step [1000/1875], d_loss: 0.8018, g_loss: 1.4991, D(x): 0.72, D(G(z)): 0.31
Epoch [133/200], Step [0/1875], d_loss: 1.0846, g_loss: 1.0952, D(x): 0.69, D(G(z)): 0.38
Epoch [133/200], Step [1000/1875], d_loss: 0.8227, g_loss: 1.0884, D(x): 0.73, D(G(z)): 0.32
Epoch [134/200], Step [0/1875], d_loss: 0.9787, g_loss: 1.4190, D(x): 0.71, D(G(z)): 0.36
Epoch [134/200], Step [1000/1875], d_loss: 1.0852, g_loss: 1.8930, D(x): 0.60, D(G(z)): 0.26
Epoch [135/200], Step [0/1875], d_loss: 1.1340, g_loss: 1.4754, D(x): 0.53, D(G(z)): 0.26
Epoch [135/200], Step [1000/1875], d_loss: 0.8791, g_loss: 1.6002, D(x): 0.73, D(G(z)): 0.34
Epoch [136/200], Step [0/1875], d_loss: 0.9289, g_loss: 1.2938, D(x): 0.74, D(G(z)): 0.39
Epoch [136/200], Step [1000/1875], d_loss: 0.8836, g_loss: 1.4500, D(x): 0.67, D(G(z)): 0.27
Epoch [137/200], Step [0/1875], d_loss: 0.9663, g_loss: 1.2554, D(x): 0.72, D(G(z)): 0.32
Epoch [137/200], Step [1000/1875], d_loss: 0.7621, g_loss: 1.5249, D(x): 0.70, D(G(z)): 0.26
Epoch [138/200], Step [0/1875], d_loss: 1.1226, g_loss: 1.3983, D(x): 0.62, D(G(z)): 0.37
Epoch [138/200], Step [1000/1875], d_loss: 0.8552, g_loss: 1.2803, D(x): 0.68, D(G(z)): 0.30
Epoch [139/200], Step [0/1875], d_loss: 1.1601, g_loss: 1.5479, D(x): 0.66, D(G(z)): 0.37
Epoch [139/200], Step [1000/1875], d_loss: 0.9467, g_loss: 1.3331, D(x): 0.78, D(G(z)): 0.41
Epoch [140/200], Step [0/1875], d_loss: 0.9792, g_loss: 1.6201, D(x): 0.72, D(G(z)): 0.33
Epoch [140/200], Step [1000/1875], d_loss: 1.0290, g_loss: 1.6335, D(x): 0.69, D(G(z)): 0.36
Epoch [141/200], Step [0/1875], d_loss: 0.8760, g_loss: 1.4903, D(x): 0.73, D(G(z)): 0.32
Epoch [141/200], Step [1000/1875], d_loss: 1.1730, g_loss: 1.3827, D(x): 0.65, D(G(z)): 0.35
Epoch [142/200], Step [0/1875], d_loss: 1.2059, g_loss: 1.6793, D(x): 0.58, D(G(z)): 0.32
Epoch [142/200], Step [1000/1875], d_loss: 1.0551, g_loss: 1.3108, D(x): 0.64, D(G(z)): 0.36
Epoch [143/200], Step [0/1875], d_loss: 1.1493, g_loss: 1.2051, D(x): 0.76, D(G(z)): 0.49
Epoch [143/200], Step [1000/1875], d_loss: 0.9853, g_loss: 0.9375, D(x): 0.69, D(G(z)): 0.33
Epoch [144/200], Step [0/1875], d_loss: 0.9930, g_loss: 1.2293, D(x): 0.72, D(G(z)): 0.39
Epoch [144/200], Step [1000/1875], d_loss: 1.1021, g_loss: 1.4031, D(x): 0.58, D(G(z)): 0.31
Epoch [145/200], Step [0/1875], d_loss: 1.0788, g_loss: 1.1353, D(x): 0.76, D(G(z)): 0.45
Epoch [145/200], Step [1000/1875], d_loss: 0.9493, g_loss: 1.9824, D(x): 0.66, D(G(z)): 0.28
Epoch [146/200], Step [0/1875], d_loss: 0.9586, g_loss: 1.3733, D(x): 0.62, D(G(z)): 0.24
Epoch [146/200], Step [1000/1875], d_loss: 1.1211, g_loss: 1.1700, D(x): 0.66, D(G(z)): 0.35
Epoch [147/200], Step [0/1875], d_loss: 1.1163, g_loss: 1.6816, D(x): 0.60, D(G(z)): 0.31
Epoch [147/200], Step [1000/1875], d_loss: 1.1130, g_loss: 1.5149, D(x): 0.61, D(G(z)): 0.33
Epoch [148/200], Step [0/1875], d_loss: 0.9912, g_loss: 1.5274, D(x): 0.65, D(G(z)): 0.30
Epoch [148/200], Step [1000/1875], d_loss: 0.7933, g_loss: 1.3911, D(x): 0.73, D(G(z)): 0.30
Epoch [149/200], Step [0/1875], d_loss: 0.7205, g_loss: 1.7141, D(x): 0.75, D(G(z)): 0.25
Epoch [149/200], Step [1000/1875], d_loss: 1.0681, g_loss: 1.2448, D(x): 0.74, D(G(z)): 0.42
Epoch [150/200], Step [0/1875], d_loss: 0.7419, g_loss: 1.4390, D(x): 0.68, D(G(z)): 0.23
Epoch [150/200], Step [1000/1875], d_loss: 1.0537, g_loss: 1.4104, D(x): 0.65, D(G(z)): 0.33
Epoch [151/200], Step [0/1875], d_loss: 0.7947, g_loss: 1.2123, D(x): 0.72, D(G(z)): 0.31
Epoch [151/200], Step [1000/1875], d_loss: 0.9032, g_loss: 1.7344, D(x): 0.58, D(G(z)): 0.18
Epoch [152/200], Step [0/1875], d_loss: 0.9012, g_loss: 1.8501, D(x): 0.67, D(G(z)): 0.25
Epoch [152/200], Step [1000/1875], d_loss: 0.9152, g_loss: 1.6020, D(x): 0.65, D(G(z)): 0.28
Epoch [153/200], Step [0/1875], d_loss: 1.1215, g_loss: 1.6962, D(x): 0.59, D(G(z)): 0.28
Epoch [153/200], Step [1000/1875], d_loss: 0.9356, g_loss: 1.3529, D(x): 0.75, D(G(z)): 0.37
Epoch [154/200], Step [0/1875], d_loss: 0.9896, g_loss: 1.3403, D(x): 0.74, D(G(z)): 0.37
Epoch [154/200], Step [1000/1875], d_loss: 1.0119, g_loss: 1.2061, D(x): 0.74, D(G(z)): 0.42
Epoch [155/200], Step [0/1875], d_loss: 1.0402, g_loss: 1.2992, D(x): 0.67, D(G(z)): 0.35
Epoch [155/200], Step [1000/1875], d_loss: 0.9205, g_loss: 1.7426, D(x): 0.68, D(G(z)): 0.32
Epoch [156/200], Step [0/1875], d_loss: 0.9372, g_loss: 0.8833, D(x): 0.73, D(G(z)): 0.37
Epoch [156/200], Step [1000/1875], d_loss: 1.2032, g_loss: 1.1325, D(x): 0.61, D(G(z)): 0.34
Epoch [157/200], Step [0/1875], d_loss: 0.9232, g_loss: 1.3139, D(x): 0.71, D(G(z)): 0.36
Epoch [157/200], Step [1000/1875], d_loss: 1.0662, g_loss: 1.0879, D(x): 0.77, D(G(z)): 0.45
Epoch [158/200], Step [0/1875], d_loss: 1.0168, g_loss: 1.1149, D(x): 0.63, D(G(z)): 0.32
Epoch [158/200], Step [1000/1875], d_loss: 0.8170, g_loss: 1.6005, D(x): 0.73, D(G(z)): 0.29
Epoch [159/200], Step [0/1875], d_loss: 0.9503, g_loss: 0.9681, D(x): 0.75, D(G(z)): 0.38
Epoch [159/200], Step [1000/1875], d_loss: 1.0097, g_loss: 1.1410, D(x): 0.69, D(G(z)): 0.38
Epoch [160/200], Step [0/1875], d_loss: 0.8961, g_loss: 1.3045, D(x): 0.78, D(G(z)): 0.42
Epoch [160/200], Step [1000/1875], d_loss: 0.8125, g_loss: 1.5028, D(x): 0.73, D(G(z)): 0.31
Epoch [161/200], Step [0/1875], d_loss: 0.9205, g_loss: 1.6392, D(x): 0.68, D(G(z)): 0.27
Epoch [161/200], Step [1000/1875], d_loss: 0.8256, g_loss: 1.3770, D(x): 0.72, D(G(z)): 0.32
Epoch [162/200], Step [0/1875], d_loss: 1.0830, g_loss: 1.5884, D(x): 0.61, D(G(z)): 0.36
Epoch [162/200], Step [1000/1875], d_loss: 0.9695, g_loss: 1.7384, D(x): 0.63, D(G(z)): 0.27
Epoch [163/200], Step [0/1875], d_loss: 1.0718, g_loss: 1.7019, D(x): 0.71, D(G(z)): 0.35
Epoch [163/200], Step [1000/1875], d_loss: 0.7365, g_loss: 1.5347, D(x): 0.72, D(G(z)): 0.28
Epoch [164/200], Step [0/1875], d_loss: 1.0448, g_loss: 1.4188, D(x): 0.56, D(G(z)): 0.25
Epoch [164/200], Step [1000/1875], d_loss: 0.7024, g_loss: 1.1493, D(x): 0.80, D(G(z)): 0.34
Epoch [165/200], Step [0/1875], d_loss: 0.6509, g_loss: 1.3696, D(x): 0.80, D(G(z)): 0.28
Epoch [165/200], Step [1000/1875], d_loss: 0.9247, g_loss: 1.3655, D(x): 0.70, D(G(z)): 0.35
Epoch [166/200], Step [0/1875], d_loss: 0.9052, g_loss: 1.0978, D(x): 0.76, D(G(z)): 0.39
Epoch [166/200], Step [1000/1875], d_loss: 0.7881, g_loss: 1.3715, D(x): 0.73, D(G(z)): 0.29
Epoch [167/200], Step [0/1875], d_loss: 1.0630, g_loss: 1.3438, D(x): 0.70, D(G(z)): 0.40
Epoch [167/200], Step [1000/1875], d_loss: 1.1506, g_loss: 1.6357, D(x): 0.54, D(G(z)): 0.24
Epoch [168/200], Step [0/1875], d_loss: 0.7992, g_loss: 1.6564, D(x): 0.73, D(G(z)): 0.25
Epoch [168/200], Step [1000/1875], d_loss: 0.9592, g_loss: 1.4136, D(x): 0.65, D(G(z)): 0.30
Epoch [169/200], Step [0/1875], d_loss: 0.9599, g_loss: 1.2311, D(x): 0.66, D(G(z)): 0.31
Epoch [169/200], Step [1000/1875], d_loss: 0.9924, g_loss: 1.6626, D(x): 0.68, D(G(z)): 0.34
Epoch [170/200], Step [0/1875], d_loss: 0.8713, g_loss: 1.8207, D(x): 0.80, D(G(z)): 0.40
Epoch [170/200], Step [1000/1875], d_loss: 1.1869, g_loss: 1.2282, D(x): 0.66, D(G(z)): 0.38
Epoch [171/200], Step [0/1875], d_loss: 1.0279, g_loss: 1.1309, D(x): 0.63, D(G(z)): 0.32
Epoch [171/200], Step [1000/1875], d_loss: 0.8797, g_loss: 2.0040, D(x): 0.71, D(G(z)): 0.30
Epoch [172/200], Step [0/1875], d_loss: 0.9126, g_loss: 1.6225, D(x): 0.65, D(G(z)): 0.24
Epoch [172/200], Step [1000/1875], d_loss: 0.8287, g_loss: 1.5217, D(x): 0.69, D(G(z)): 0.27
Epoch [173/200], Step [0/1875], d_loss: 0.9320, g_loss: 1.1766, D(x): 0.68, D(G(z)): 0.28
Epoch [173/200], Step [1000/1875], d_loss: 1.0596, g_loss: 2.0341, D(x): 0.69, D(G(z)): 0.29
Epoch [174/200], Step [0/1875], d_loss: 0.8312, g_loss: 1.5397, D(x): 0.79, D(G(z)): 0.33
Epoch [174/200], Step [1000/1875], d_loss: 0.7674, g_loss: 1.2422, D(x): 0.70, D(G(z)): 0.24
Epoch [175/200], Step [0/1875], d_loss: 0.7503, g_loss: 1.7233, D(x): 0.73, D(G(z)): 0.24
Epoch [175/200], Step [1000/1875], d_loss: 1.0236, g_loss: 1.3441, D(x): 0.64, D(G(z)): 0.34
Epoch [176/200], Step [0/1875], d_loss: 0.9639, g_loss: 1.4112, D(x): 0.73, D(G(z)): 0.39
Epoch [176/200], Step [1000/1875], d_loss: 0.7873, g_loss: 1.5375, D(x): 0.76, D(G(z)): 0.32
Epoch [177/200], Step [0/1875], d_loss: 0.8955, g_loss: 1.4132, D(x): 0.68, D(G(z)): 0.29
Epoch [177/200], Step [1000/1875], d_loss: 1.1728, g_loss: 1.5841, D(x): 0.60, D(G(z)): 0.32
Epoch [178/200], Step [0/1875], d_loss: 0.8312, g_loss: 1.3275, D(x): 0.72, D(G(z)): 0.27
Epoch [178/200], Step [1000/1875], d_loss: 1.0104, g_loss: 1.3960, D(x): 0.74, D(G(z)): 0.38
Epoch [179/200], Step [0/1875], d_loss: 0.8851, g_loss: 1.2724, D(x): 0.77, D(G(z)): 0.39
Epoch [179/200], Step [1000/1875], d_loss: 1.0904, g_loss: 1.3150, D(x): 0.65, D(G(z)): 0.37
Epoch [180/200], Step [0/1875], d_loss: 0.8384, g_loss: 1.4742, D(x): 0.71, D(G(z)): 0.29
Epoch [180/200], Step [1000/1875], d_loss: 1.2366, g_loss: 1.3583, D(x): 0.69, D(G(z)): 0.46
Epoch [181/200], Step [0/1875], d_loss: 1.1119, g_loss: 1.6124, D(x): 0.72, D(G(z)): 0.37
Epoch [181/200], Step [1000/1875], d_loss: 1.1789, g_loss: 1.1550, D(x): 0.59, D(G(z)): 0.31
Epoch [182/200], Step [0/1875], d_loss: 1.0281, g_loss: 0.8599, D(x): 0.74, D(G(z)): 0.41
Epoch [182/200], Step [1000/1875], d_loss: 1.1698, g_loss: 1.5769, D(x): 0.63, D(G(z)): 0.36
Epoch [183/200], Step [0/1875], d_loss: 0.8879, g_loss: 1.2538, D(x): 0.73, D(G(z)): 0.34
Epoch [183/200], Step [1000/1875], d_loss: 0.9862, g_loss: 1.7838, D(x): 0.61, D(G(z)): 0.28
Epoch [184/200], Step [0/1875], d_loss: 0.8218, g_loss: 1.2922, D(x): 0.67, D(G(z)): 0.25
Epoch [184/200], Step [1000/1875], d_loss: 0.8309, g_loss: 1.4066, D(x): 0.81, D(G(z)): 0.36
Epoch [185/200], Step [0/1875], d_loss: 0.8038, g_loss: 1.1647, D(x): 0.68, D(G(z)): 0.25
Epoch [185/200], Step [1000/1875], d_loss: 1.1130, g_loss: 1.4807, D(x): 0.63, D(G(z)): 0.31
Epoch [186/200], Step [0/1875], d_loss: 1.3574, g_loss: 1.8987, D(x): 0.53, D(G(z)): 0.29
Epoch [186/200], Step [1000/1875], d_loss: 1.0933, g_loss: 1.5170, D(x): 0.59, D(G(z)): 0.29
Epoch [187/200], Step [0/1875], d_loss: 0.9468, g_loss: 1.6168, D(x): 0.72, D(G(z)): 0.35
Epoch [187/200], Step [1000/1875], d_loss: 0.9485, g_loss: 1.8171, D(x): 0.69, D(G(z)): 0.29
Epoch [188/200], Step [0/1875], d_loss: 0.9278, g_loss: 1.0927, D(x): 0.66, D(G(z)): 0.27
Epoch [188/200], Step [1000/1875], d_loss: 0.8841, g_loss: 1.4329, D(x): 0.74, D(G(z)): 0.34
Epoch [189/200], Step [0/1875], d_loss: 1.1402, g_loss: 1.1908, D(x): 0.60, D(G(z)): 0.34
Epoch [189/200], Step [1000/1875], d_loss: 0.9516, g_loss: 1.2448, D(x): 0.71, D(G(z)): 0.35
Epoch [190/200], Step [0/1875], d_loss: 0.8385, g_loss: 1.1752, D(x): 0.77, D(G(z)): 0.32
Epoch [190/200], Step [1000/1875], d_loss: 1.1430, g_loss: 1.3168, D(x): 0.70, D(G(z)): 0.41
Epoch [191/200], Step [0/1875], d_loss: 0.8999, g_loss: 1.4734, D(x): 0.65, D(G(z)): 0.25
Epoch [191/200], Step [1000/1875], d_loss: 0.8006, g_loss: 1.3487, D(x): 0.68, D(G(z)): 0.20
Epoch [192/200], Step [0/1875], d_loss: 0.9304, g_loss: 1.5564, D(x): 0.71, D(G(z)): 0.32
Epoch [192/200], Step [1000/1875], d_loss: 0.8803, g_loss: 1.3595, D(x): 0.62, D(G(z)): 0.23
Epoch [193/200], Step [0/1875], d_loss: 0.8008, g_loss: 1.4470, D(x): 0.67, D(G(z)): 0.24
Epoch [193/200], Step [1000/1875], d_loss: 1.0546, g_loss: 1.0424, D(x): 0.83, D(G(z)): 0.47
Epoch [194/200], Step [0/1875], d_loss: 0.8909, g_loss: 2.0232, D(x): 0.65, D(G(z)): 0.25
Epoch [194/200], Step [1000/1875], d_loss: 1.1190, g_loss: 1.4303, D(x): 0.60, D(G(z)): 0.28
Epoch [195/200], Step [0/1875], d_loss: 0.9886, g_loss: 1.5795, D(x): 0.59, D(G(z)): 0.27
Epoch [195/200], Step [1000/1875], d_loss: 0.8679, g_loss: 1.7040, D(x): 0.67, D(G(z)): 0.24
Epoch [196/200], Step [0/1875], d_loss: 1.0795, g_loss: 1.9095, D(x): 0.62, D(G(z)): 0.29
Epoch [196/200], Step [1000/1875], d_loss: 0.9541, g_loss: 1.3313, D(x): 0.72, D(G(z)): 0.33
Epoch [197/200], Step [0/1875], d_loss: 1.0868, g_loss: 1.6374, D(x): 0.78, D(G(z)): 0.45
Epoch [197/200], Step [1000/1875], d_loss: 1.0295, g_loss: 1.5136, D(x): 0.67, D(G(z)): 0.36
Epoch [198/200], Step [0/1875], d_loss: 1.0157, g_loss: 1.9654, D(x): 0.56, D(G(z)): 0.17
Epoch [198/200], Step [1000/1875], d_loss: 0.7938, g_loss: 1.6147, D(x): 0.79, D(G(z)): 0.31
Epoch [199/200], Step [0/1875], d_loss: 0.7751, g_loss: 1.2779, D(x): 0.84, D(G(z)): 0.39
Epoch [199/200], Step [1000/1875], d_loss: 0.9865, g_loss: 1.4814, D(x): 0.65, D(G(z)): 0.27

Fake images

z = torch.randn(1, latent_size).to(device)
fake_images = G(z).view(28, 28).data.cpu().numpy()
plt.imshow(fake_images)
<matplotlib.image.AxesImage at 0x7f55b00136d8>

[output image: a generated digit]
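
To get a better sense of sample quality we can sample a whole batch and tile it into a grid; a small sketch of our own (not part of the original notebook) using torchvision's make_grid:

import torchvision.utils as vutils

z = torch.randn(64, latent_size).to(device)
samples = G(z).view(-1, 1, 28, 28).data.cpu()
samples = (samples + 1) / 2                    # Tanh output lies in [-1, 1]; map back to [0, 1]
grid = vutils.make_grid(samples, nrow=8)
plt.imshow(grid.permute(1, 2, 0).numpy())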

Real images

plt.imshow(images[0].view(28,28).data.cpu().numpy())
<matplotlib.image.AxesImage at 0x7f55b09e7f60>

[output image: a real MNIST digit]

DCGAN

UNSUPERVISED REPRESENTATION LEARNING WITH DEEP CONVOLUTIONAL GENERATIVE ADVERSARIAL NETWORKS

Image download link:
https://drive.google.com/drive/folders/0B7EVK8r0v71pbWNEUjJKdDQ3dGc
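
A practical note on the loader below: torchvision.datasets.ImageFolder treats every subdirectory of the root as a class, so the CelebA images must sit inside at least one subdirectory (e.g. celeba/img_align_celeba/<folder>/xxx.png) rather than directly under the data root.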

import torchvision.utils as vutils
# !ls celeba/img_align_celeba/img_align_celeba_png
image_size = 64
batch_size = 128
dataroot = "celeba/img_align_celeba"
num_workers = 2
dataset = torchvision.datasets.ImageFolder(root=dataroot, transform=transforms.Compose([
    transforms.Resize(image_size),
    transforms.CenterCrop(image_size),
    transforms.ToTensor(),
    transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
]))
dataloader = torch.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=True, num_workers=num_workers)
real_batch = next(iter(dataloader))
plt.figure(figsize=(8,8))
plt.axis("off")
plt.title("Training Images")
plt.imshow(np.transpose(vutils.make_grid(real_batch[0].to(device)[:64], padding=2, normalize=True).cpu(), (1,2,0)))
<matplotlib.image.AxesImage at 0x7f6db16dafd0>

[output image: a grid of CelebA training images]

We initialize all of the model's weights from a normal distribution with mean=0, std=0.02.

def weights_init(m):
    classname = m.__class__.__name__
    if classname.find('Conv') != -1:
        nn.init.normal_(m.weight.data, 0.0, 0.02)
    elif classname.find('BatchNorm') != -1:
        nn.init.normal_(m.weight.data, 1.0, 0.02)
        nn.init.constant_(m.bias.data, 0)

[figure: DCGAN generator architecture]

nz = 100  # size of the latent vector z
ngf = 64  # generator feature map size
ndf = 64  # discriminator feature map size
nc = 3    # number of color channels

class Generator(nn.Module):
    def __init__(self):
        super(Generator, self).__init__()
        self.main = nn.Sequential(
            # input is Z, going into a convolution
            # torch.nn.ConvTranspose2d(in_channels, out_channels, kernel_size,
            #     stride=1, padding=0, output_padding=0, groups=1, bias=True, dilation=1)
            nn.ConvTranspose2d(nz, ngf * 8, 4, 1, 0, bias=False),
            nn.BatchNorm2d(ngf * 8),
            nn.ReLU(True),
            # state size. (ngf*8) x 4 x 4
            nn.ConvTranspose2d(ngf * 8, ngf * 4, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ngf * 4),
            nn.ReLU(True),
            # state size. (ngf*4) x 8 x 8
            nn.ConvTranspose2d(ngf * 4, ngf * 2, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ngf * 2),
            nn.ReLU(True),
            # state size. (ngf*2) x 16 x 16
            nn.ConvTranspose2d(ngf * 2, ngf, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ngf),
            nn.ReLU(True),
            # state size. (ngf) x 32 x 32
            nn.ConvTranspose2d(ngf, nc, 4, 2, 1, bias=False),
            nn.Tanh()
            # state size. (nc) x 64 x 64
        )

    def forward(self, input):
        return self.main(input)
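For a transposed convolution with `output_padding=0` and `dilation=1`, the output size is `H_out = (H_in - 1) * stride - 2 * padding + kernel_size`, so the first layer maps 1x1 to 4x4 and every later layer doubles the resolution: 4 → 8 → 16 → 32 → 64. A quick shape check (a sketch, not part of the original notebook):

z = torch.randn(16, nz, 1, 1)
print(Generator()(z).shape)  # torch.Size([16, 3, 64, 64])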
# Now, we can instantiate the generator and apply the weights_init function.
# Check out the printed model to see how the generator object is structured.

# Create the generator
netG = Generator().to(device)

# Apply the weights_init function to randomly initialize all weights
# to mean=0, stdev=0.02.
netG.apply(weights_init)

# Print the model
print(netG)
Generator(
  (main): Sequential(
    (0): ConvTranspose2d(100, 512, kernel_size=(4, 4), stride=(1, 1), bias=False)
    (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (2): ReLU(inplace)
    (3): ConvTranspose2d(512, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (4): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (5): ReLU(inplace)
    (6): ConvTranspose2d(256, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (7): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (8): ReLU(inplace)
    (9): ConvTranspose2d(128, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (10): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (11): ReLU(inplace)
    (12): ConvTranspose2d(64, 3, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (13): Tanh()
  )
)
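As a rough size check (a sketch, not in the original notebook), the generator holds roughly 3.6 million parameters:

print(sum(p.numel() for p in netG.parameters()))  # ~3.6 million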

Discriminator

class Discriminator(nn.Module):
    def __init__(self):
        super(Discriminator, self).__init__()
        self.main = nn.Sequential(
            # input is (nc) x 64 x 64
            nn.Conv2d(nc, ndf, 4, 2, 1, bias=False),
            nn.LeakyReLU(0.2, inplace=True),
            # state size. (ndf) x 32 x 32
            nn.Conv2d(ndf, ndf * 2, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ndf * 2),
            nn.LeakyReLU(0.2, inplace=True),
            # state size. (ndf*2) x 16 x 16
            nn.Conv2d(ndf * 2, ndf * 4, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ndf * 4),
            nn.LeakyReLU(0.2, inplace=True),
            # state size. (ndf*4) x 8 x 8
            nn.Conv2d(ndf * 4, ndf * 8, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ndf * 8),
            nn.LeakyReLU(0.2, inplace=True),
            # state size. (ndf*8) x 4 x 4
            nn.Conv2d(ndf * 8, 1, 4, 1, 0, bias=False),
            nn.Sigmoid()
        )

    def forward(self, input):
        return self.main(input)
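The discriminator mirrors the generator: each strided convolution halves the resolution (64 → 32 → 16 → 8 → 4), and the final 4x4 convolution plus Sigmoid collapses it to a single probability per image. A quick shape check (a sketch):

x = torch.randn(16, nc, 64, 64)
print(Discriminator()(x).shape)  # torch.Size([16, 1, 1, 1])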
# Now, as with the generator, we can create the discriminator, apply the weights_init function, and print the model's structure.

# Create the Discriminator
netD = Discriminator().to(device)

# Apply the weights_init function to randomly initialize all weights
# to mean=0, stdev=0.02.
netD.apply(weights_init)

# Print the model
print(netD)
Discriminator(
  (main): Sequential(
    (0): Conv2d(3, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (1): LeakyReLU(negative_slope=0.2, inplace)
    (2): Conv2d(64, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (3): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (4): LeakyReLU(negative_slope=0.2, inplace)
    (5): Conv2d(128, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (6): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (7): LeakyReLU(negative_slope=0.2, inplace)
    (8): Conv2d(256, 512, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (9): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (10): LeakyReLU(negative_slope=0.2, inplace)
    (11): Conv2d(512, 1, kernel_size=(4, 4), stride=(1, 1), bias=False)
    (12): Sigmoid()
  )
)

Start training

lr = 0.0002
beta1 = 0.5

loss_fn = nn.BCELoss()
fixed_noise = torch.randn(64, nz, 1, 1, device=device)
d_optimizer = torch.optim.Adam(netD.parameters(), lr=lr, betas=(beta1, 0.999))
g_optimizer = torch.optim.Adam(netG.parameters(), lr=lr, betas=(beta1, 0.999))
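With targets of 1 for real and 0 for fake, BCELoss reduces to -log(p) and -log(1-p) respectively, so minimizing it trains D to maximize log(D(x)) + log(1 - D(G(z))), exactly the GAN objective. A tiny numeric check (a sketch):

p = torch.tensor([0.9])
print(loss_fn(p, torch.ones(1)))   # -log(0.9) ≈ 0.1054
print(loss_fn(p, torch.zeros(1)))  # -log(0.1) ≈ 2.3026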
num_epochs = 5
G_losses = []
D_losses = []
for epoch in range(num_epochs):
    for i, data in enumerate(dataloader):
        # Train the discriminator: maximize log(D(x)) + log(1-D(G(z)))

        # First on real images
        netD.zero_grad()
        real_images = data[0].to(device)
        b_size = real_images.size(0)
        label = torch.ones(b_size).to(device)
        output = netD(real_images).view(-1)
        real_loss = loss_fn(output, label)
        real_loss.backward()
        D_x = output.mean().item()

        # Then on generated (fake) images
        noise = torch.randn(b_size, nz, 1, 1, device=device)
        fake_images = netG(noise)
        label.fill_(0)
        output = netD(fake_images.detach()).view(-1)  # detach: don't backprop into G here
        fake_loss = loss_fn(output, label)
        fake_loss.backward()
        D_G_z1 = output.mean().item()
        loss_D = real_loss + fake_loss
        d_optimizer.step()

        # Train the generator: maximize log(D(G(z)))
        netG.zero_grad()
        label.fill_(1)
        output = netD(fake_images).view(-1)
        loss_G = loss_fn(output, label)
        loss_G.backward()
        D_G_z2 = output.mean().item()
        g_optimizer.step()

        if i % 50 == 0:
            print("[{}/{}] [{}/{}] Loss_D: {:.4f} Loss_G {:.4f} D(x): {:.4f} D(G(z)): {:.4f}/{:.4f}"
                  .format(epoch, num_epochs, i, len(dataloader), loss_D.item(), loss_G.item(), D_x, D_G_z1, D_G_z2))

        G_losses.append(loss_G.item())
        D_losses.append(loss_D.item())
[0/5] [0/1583] Loss_D: 1.7977 Loss_G 2.8596 D(x): 0.3357 D(G(z)): 0.3494/0.0786
[0/5] [50/1583] Loss_D: 0.4748 Loss_G 30.1861 D(x): 0.7715 D(G(z)): 0.0000/0.0000
[0/5] [100/1583] Loss_D: 0.1432 Loss_G 8.7877 D(x): 0.9865 D(G(z)): 0.1092/0.0016
[0/5] [150/1583] Loss_D: 0.5332 Loss_G 6.9773 D(x): 0.8701 D(G(z)): 0.2674/0.0030
[0/5] [200/1583] Loss_D: 1.5008 Loss_G 8.1102 D(x): 0.4722 D(G(z)): 0.0029/0.0011
[0/5] [250/1583] Loss_D: 0.3476 Loss_G 5.5318 D(x): 0.8942 D(G(z)): 0.1540/0.0132
[0/5] [300/1583] Loss_D: 0.6494 Loss_G 5.9788 D(x): 0.9072 D(G(z)): 0.3348/0.0124
[0/5] [350/1583] Loss_D: 0.8482 Loss_G 5.6696 D(x): 0.8947 D(G(z)): 0.4554/0.0091
[0/5] [400/1583] Loss_D: 0.5689 Loss_G 3.3358 D(x): 0.7856 D(G(z)): 0.1807/0.0647
[0/5] [450/1583] Loss_D: 0.8698 Loss_G 7.5017 D(x): 0.8675 D(G(z)): 0.4281/0.0022
[0/5] [500/1583] Loss_D: 0.3542 Loss_G 3.1888 D(x): 0.8573 D(G(z)): 0.1214/0.0587
[0/5] [550/1583] Loss_D: 0.3387 Loss_G 3.9772 D(x): 0.7958 D(G(z)): 0.0605/0.0351
[0/5] [600/1583] Loss_D: 0.6330 Loss_G 4.3450 D(x): 0.7693 D(G(z)): 0.1875/0.0238
[0/5] [650/1583] Loss_D: 0.6735 Loss_G 4.8144 D(x): 0.6305 D(G(z)): 0.0358/0.0166
[0/5] [700/1583] Loss_D: 0.3484 Loss_G 4.6406 D(x): 0.8652 D(G(z)): 0.1372/0.0182
[0/5] [750/1583] Loss_D: 0.5287 Loss_G 5.8325 D(x): 0.8684 D(G(z)): 0.2675/0.0056
[0/5] [800/1583] Loss_D: 0.6363 Loss_G 3.1169 D(x): 0.6298 D(G(z)): 0.0332/0.0755
[0/5] [850/1583] Loss_D: 0.4994 Loss_G 5.3602 D(x): 0.8846 D(G(z)): 0.2461/0.0114
[0/5] [900/1583] Loss_D: 0.5199 Loss_G 5.4862 D(x): 0.9498 D(G(z)): 0.2993/0.0118
[0/5] [950/1583] Loss_D: 0.3113 Loss_G 3.8929 D(x): 0.8070 D(G(z)): 0.0317/0.0357
[0/5] [1000/1583] Loss_D: 1.3229 Loss_G 1.8840 D(x): 0.3859 D(G(z)): 0.0013/0.2331
[0/5] [1050/1583] Loss_D: 0.3150 Loss_G 3.5746 D(x): 0.8395 D(G(z)): 0.0970/0.0547
[0/5] [1100/1583] Loss_D: 0.5306 Loss_G 3.1867 D(x): 0.6945 D(G(z)): 0.0447/0.0750
[0/5] [1150/1583] Loss_D: 0.5492 Loss_G 2.5496 D(x): 0.6916 D(G(z)): 0.0663/0.1255
[0/5] [1200/1583] Loss_D: 0.3651 Loss_G 4.2102 D(x): 0.7647 D(G(z)): 0.0440/0.0365
[0/5] [1250/1583] Loss_D: 1.3114 Loss_G 2.9933 D(x): 0.4186 D(G(z)): 0.0093/0.0944
[0/5] [1300/1583] Loss_D: 0.7040 Loss_G 6.9100 D(x): 0.8776 D(G(z)): 0.3483/0.0018
[0/5] [1350/1583] Loss_D: 0.6155 Loss_G 2.0302 D(x): 0.6897 D(G(z)): 0.1118/0.1726
[0/5] [1400/1583] Loss_D: 0.5944 Loss_G 3.1167 D(x): 0.7538 D(G(z)): 0.1957/0.0642
[0/5] [1450/1583] Loss_D: 0.3558 Loss_G 3.7467 D(x): 0.8731 D(G(z)): 0.1555/0.0415
[0/5] [1500/1583] Loss_D: 0.4071 Loss_G 4.1953 D(x): 0.8410 D(G(z)): 0.1453/0.0310
[0/5] [1550/1583] Loss_D: 1.6558 Loss_G 9.1945 D(x): 0.9677 D(G(z)): 0.7053/0.0004
[1/5] [0/1583] Loss_D: 0.5024 Loss_G 4.3460 D(x): 0.8704 D(G(z)): 0.2554/0.0201
[1/5] [50/1583] Loss_D: 0.7825 Loss_G 5.5473 D(x): 0.9305 D(G(z)): 0.4510/0.0072
[1/5] [100/1583] Loss_D: 0.5763 Loss_G 4.2330 D(x): 0.8332 D(G(z)): 0.2738/0.0248
[1/5] [150/1583] Loss_D: 0.5093 Loss_G 3.9376 D(x): 0.8285 D(G(z)): 0.2162/0.0325
[1/5] [200/1583] Loss_D: 0.7584 Loss_G 4.4998 D(x): 0.8351 D(G(z)): 0.3689/0.0258
[1/5] [250/1583] Loss_D: 0.4091 Loss_G 3.9546 D(x): 0.7356 D(G(z)): 0.0257/0.0337
[1/5] [300/1583] Loss_D: 0.5199 Loss_G 4.4009 D(x): 0.8562 D(G(z)): 0.2620/0.0212
[1/5] [350/1583] Loss_D: 1.6999 Loss_G 1.1305 D(x): 0.3153 D(G(z)): 0.0194/0.3955
[1/5] [400/1583] Loss_D: 0.4612 Loss_G 5.0442 D(x): 0.9210 D(G(z)): 0.2755/0.0113
[1/5] [450/1583] Loss_D: 0.3626 Loss_G 2.7311 D(x): 0.8119 D(G(z)): 0.1034/0.1106
[1/5] [500/1583] Loss_D: 0.5614 Loss_G 3.6350 D(x): 0.7820 D(G(z)): 0.1946/0.0512
[1/5] [550/1583] Loss_D: 0.3365 Loss_G 3.3296 D(x): 0.8540 D(G(z)): 0.1276/0.0561
[1/5] [600/1583] Loss_D: 0.9953 Loss_G 1.0561 D(x): 0.4885 D(G(z)): 0.0517/0.4178
[1/5] [650/1583] Loss_D: 0.4633 Loss_G 4.3857 D(x): 0.9219 D(G(z)): 0.2868/0.0181
[1/5] [700/1583] Loss_D: 0.3547 Loss_G 3.1719 D(x): 0.8356 D(G(z)): 0.1229/0.0661
[1/5] [750/1583] Loss_D: 1.4018 Loss_G 7.3128 D(x): 0.9540 D(G(z)): 0.6648/0.0022
[1/5] [800/1583] Loss_D: 1.9716 Loss_G 2.3110 D(x): 0.2525 D(G(z)): 0.0097/0.1644
[1/5] [850/1583] Loss_D: 0.3039 Loss_G 3.3825 D(x): 0.8757 D(G(z)): 0.1389/0.0494
[1/5] [900/1583] Loss_D: 0.4306 Loss_G 4.5716 D(x): 0.9128 D(G(z)): 0.2424/0.0176
[1/5] [950/1583] Loss_D: 1.0529 Loss_G 6.2549 D(x): 0.9377 D(G(z)): 0.5375/0.0043
[1/5] [1000/1583] Loss_D: 0.5825 Loss_G 2.5413 D(x): 0.7108 D(G(z)): 0.1435/0.1155
[1/5] [1050/1583] Loss_D: 0.6516 Loss_G 4.6775 D(x): 0.9519 D(G(z)): 0.4014/0.0170
[1/5] [1100/1583] Loss_D: 0.8078 Loss_G 5.3468 D(x): 0.8942 D(G(z)): 0.4513/0.0077
[1/5] [1150/1583] Loss_D: 0.7372 Loss_G 4.2160 D(x): 0.8662 D(G(z)): 0.3771/0.0272
[1/5] [1200/1583] Loss_D: 0.5704 Loss_G 1.7837 D(x): 0.6827 D(G(z)): 0.0922/0.2175
[1/5] [1250/1583] Loss_D: 0.8721 Loss_G 4.8623 D(x): 0.9443 D(G(z)): 0.4977/0.0137
[1/5] [1300/1583] Loss_D: 0.5091 Loss_G 2.4733 D(x): 0.6754 D(G(z)): 0.0485/0.1242
[1/5] [1350/1583] Loss_D: 0.4865 Loss_G 3.0695 D(x): 0.8064 D(G(z)): 0.1955/0.0689
[1/5] [1400/1583] Loss_D: 0.6490 Loss_G 4.3856 D(x): 0.9040 D(G(z)): 0.3590/0.0200
[1/5] [1450/1583] Loss_D: 0.6000 Loss_G 2.2117 D(x): 0.7705 D(G(z)): 0.2435/0.1419
[1/5] [1500/1583] Loss_D: 0.5049 Loss_G 3.4771 D(x): 0.8402 D(G(z)): 0.2365/0.0487
[1/5] [1550/1583] Loss_D: 0.5885 Loss_G 1.5197 D(x): 0.6468 D(G(z)): 0.0694/0.2862
[2/5] [0/1583] Loss_D: 0.5091 Loss_G 2.2415 D(x): 0.7458 D(G(z)): 0.1528/0.1331
[2/5] [50/1583] Loss_D: 0.4685 Loss_G 2.8283 D(x): 0.8897 D(G(z)): 0.2576/0.0899
[2/5] [100/1583] Loss_D: 0.5364 Loss_G 2.2865 D(x): 0.7544 D(G(z)): 0.1845/0.1296
[2/5] [150/1583] Loss_D: 2.4751 Loss_G 4.7502 D(x): 0.9278 D(G(z)): 0.8115/0.0218
[2/5] [200/1583] Loss_D: 1.7663 Loss_G 1.6306 D(x): 0.2388 D(G(z)): 0.0119/0.2518
[2/5] [250/1583] Loss_D: 0.6184 Loss_G 1.8157 D(x): 0.6371 D(G(z)): 0.0831/0.2129
[2/5] [300/1583] Loss_D: 0.6009 Loss_G 2.4621 D(x): 0.6639 D(G(z)): 0.0986/0.1299
[2/5] [350/1583] Loss_D: 0.6172 Loss_G 2.7100 D(x): 0.7548 D(G(z)): 0.2272/0.0928
[2/5] [400/1583] Loss_D: 0.5001 Loss_G 2.0378 D(x): 0.6971 D(G(z)): 0.0869/0.1678
[2/5] [450/1583] Loss_D: 0.6404 Loss_G 3.3460 D(x): 0.8992 D(G(z)): 0.3705/0.0574
[2/5] [500/1583] Loss_D: 0.5403 Loss_G 2.1565 D(x): 0.6950 D(G(z)): 0.1098/0.1509
[2/5] [550/1583] Loss_D: 0.5993 Loss_G 3.6174 D(x): 0.9018 D(G(z)): 0.3564/0.0417
[2/5] [600/1583] Loss_D: 1.0482 Loss_G 3.6277 D(x): 0.9294 D(G(z)): 0.5477/0.0558
[2/5] [650/1583] Loss_D: 0.4903 Loss_G 2.8267 D(x): 0.8284 D(G(z)): 0.2277/0.0809
[2/5] [700/1583] Loss_D: 0.6068 Loss_G 2.0575 D(x): 0.6432 D(G(z)): 0.0900/0.1623
[2/5] [750/1583] Loss_D: 1.4213 Loss_G 1.1597 D(x): 0.3157 D(G(z)): 0.0459/0.3713
[2/5] [800/1583] Loss_D: 0.5707 Loss_G 2.9375 D(x): 0.8297 D(G(z)): 0.2824/0.0749
[2/5] [850/1583] Loss_D: 0.8145 Loss_G 0.8862 D(x): 0.5465 D(G(z)): 0.0760/0.4527
[2/5] [900/1583] Loss_D: 0.7114 Loss_G 1.6350 D(x): 0.6121 D(G(z)): 0.1375/0.2354
[2/5] [950/1583] Loss_D: 0.6885 Loss_G 2.2735 D(x): 0.6473 D(G(z)): 0.1660/0.1418
[2/5] [1000/1583] Loss_D: 1.0785 Loss_G 1.0148 D(x): 0.4366 D(G(z)): 0.0730/0.4223
[2/5] [1050/1583] Loss_D: 0.7579 Loss_G 2.5076 D(x): 0.6528 D(G(z)): 0.1961/0.1142
[2/5] [1100/1583] Loss_D: 0.6557 Loss_G 3.5820 D(x): 0.8655 D(G(z)): 0.3631/0.0372
[2/5] [1150/1583] Loss_D: 0.6402 Loss_G 3.3918 D(x): 0.9224 D(G(z)): 0.3697/0.0502
[2/5] [1200/1583] Loss_D: 0.6989 Loss_G 2.1415 D(x): 0.6174 D(G(z)): 0.1267/0.1470
[2/5] [1250/1583] Loss_D: 0.6699 Loss_G 1.9413 D(x): 0.6219 D(G(z)): 0.0995/0.1874
[2/5] [1300/1583] Loss_D: 0.6479 Loss_G 1.8731 D(x): 0.6013 D(G(z)): 0.0677/0.1886
[2/5] [1350/1583] Loss_D: 0.5023 Loss_G 3.0319 D(x): 0.8700 D(G(z)): 0.2779/0.0599
[2/5] [1400/1583] Loss_D: 0.4328 Loss_G 2.8918 D(x): 0.7801 D(G(z)): 0.1382/0.0732
[2/5] [1450/1583] Loss_D: 0.6579 Loss_G 2.0119 D(x): 0.7162 D(G(z)): 0.2334/0.1683
[2/5] [1500/1583] Loss_D: 0.8299 Loss_G 3.7579 D(x): 0.8492 D(G(z)): 0.4391/0.0331
[2/5] [1550/1583] Loss_D: 0.6887 Loss_G 1.4614 D(x): 0.6397 D(G(z)): 0.1633/0.2818
[3/5] [0/1583] Loss_D: 0.8251 Loss_G 2.7054 D(x): 0.7448 D(G(z)): 0.3643/0.0842
[3/5] [50/1583] Loss_D: 0.6720 Loss_G 2.8488 D(x): 0.8670 D(G(z)): 0.3652/0.0798
[3/5] [100/1583] Loss_D: 0.6498 Loss_G 1.4725 D(x): 0.7225 D(G(z)): 0.2206/0.2821
[3/5] [150/1583] Loss_D: 1.0247 Loss_G 1.1697 D(x): 0.4694 D(G(z)): 0.1067/0.3692
[3/5] [200/1583] Loss_D: 0.5313 Loss_G 2.4117 D(x): 0.8255 D(G(z)): 0.2553/0.1162
[3/5] [250/1583] Loss_D: 0.7865 Loss_G 2.2379 D(x): 0.5887 D(G(z)): 0.1421/0.1469
[3/5] [300/1583] Loss_D: 1.1039 Loss_G 3.4455 D(x): 0.8690 D(G(z)): 0.5555/0.0467
[3/5] [350/1583] Loss_D: 0.5300 Loss_G 1.9104 D(x): 0.7838 D(G(z)): 0.2207/0.1845
[3/5] [400/1583] Loss_D: 0.7535 Loss_G 3.2029 D(x): 0.7946 D(G(z)): 0.3583/0.0539
[3/5] [450/1583] Loss_D: 0.7322 Loss_G 4.1419 D(x): 0.8885 D(G(z)): 0.4089/0.0217
[3/5] [500/1583] Loss_D: 0.5901 Loss_G 2.4395 D(x): 0.7824 D(G(z)): 0.2573/0.1048
[3/5] [550/1583] Loss_D: 0.6639 Loss_G 3.1330 D(x): 0.8085 D(G(z)): 0.3284/0.0604
[3/5] [600/1583] Loss_D: 0.5979 Loss_G 2.5612 D(x): 0.8028 D(G(z)): 0.2748/0.0973
[3/5] [650/1583] Loss_D: 0.6524 Loss_G 2.2008 D(x): 0.7211 D(G(z)): 0.2281/0.1383
[3/5] [700/1583] Loss_D: 0.5078 Loss_G 2.2305 D(x): 0.7849 D(G(z)): 0.1987/0.1305
[3/5] [750/1583] Loss_D: 0.7095 Loss_G 3.5083 D(x): 0.8811 D(G(z)): 0.3953/0.0417
[3/5] [800/1583] Loss_D: 0.7160 Loss_G 2.6990 D(x): 0.8064 D(G(z)): 0.3518/0.0900
[3/5] [850/1583] Loss_D: 0.6407 Loss_G 3.0253 D(x): 0.8553 D(G(z)): 0.3457/0.0606
[3/5] [900/1583] Loss_D: 0.7381 Loss_G 3.8821 D(x): 0.8539 D(G(z)): 0.3712/0.0279
[3/5] [950/1583] Loss_D: 1.0212 Loss_G 1.1013 D(x): 0.5035 D(G(z)): 0.1802/0.3981
[3/5] [1000/1583] Loss_D: 0.5352 Loss_G 2.1082 D(x): 0.7537 D(G(z)): 0.1909/0.1556
[3/5] [1050/1583] Loss_D: 0.9204 Loss_G 1.1990 D(x): 0.5621 D(G(z)): 0.2122/0.3387
[3/5] [1100/1583] Loss_D: 1.3896 Loss_G 3.7979 D(x): 0.8729 D(G(z)): 0.6477/0.0351
[3/5] [1150/1583] Loss_D: 0.6079 Loss_G 2.3365 D(x): 0.7236 D(G(z)): 0.1868/0.1222
[3/5] [1200/1583] Loss_D: 0.7446 Loss_G 3.2400 D(x): 0.8669 D(G(z)): 0.4066/0.0536
[3/5] [1250/1583] Loss_D: 0.5165 Loss_G 2.2988 D(x): 0.7275 D(G(z)): 0.1453/0.1266
[3/5] [1300/1583] Loss_D: 0.4456 Loss_G 2.2971 D(x): 0.7558 D(G(z)): 0.1283/0.1286
[3/5] [1350/1583] Loss_D: 0.6839 Loss_G 1.8744 D(x): 0.7300 D(G(z)): 0.2578/0.1925
[3/5] [1400/1583] Loss_D: 0.5876 Loss_G 3.1330 D(x): 0.8353 D(G(z)): 0.3002/0.0564
[3/5] [1450/1583] Loss_D: 0.5586 Loss_G 3.2172 D(x): 0.9043 D(G(z)): 0.3380/0.0534
[3/5] [1500/1583] Loss_D: 0.5847 Loss_G 2.8399 D(x): 0.8091 D(G(z)): 0.2777/0.0809
[3/5] [1550/1583] Loss_D: 0.4929 Loss_G 2.3813 D(x): 0.7532 D(G(z)): 0.1533/0.1226
[4/5] [0/1583] Loss_D: 0.8560 Loss_G 4.1151 D(x): 0.8905 D(G(z)): 0.4680/0.0250
[4/5] [50/1583] Loss_D: 0.6350 Loss_G 2.4734 D(x): 0.7954 D(G(z)): 0.2928/0.1036
[4/5] [100/1583] Loss_D: 0.5003 Loss_G 2.0825 D(x): 0.7856 D(G(z)): 0.2060/0.1513
[4/5] [150/1583] Loss_D: 0.6394 Loss_G 2.3414 D(x): 0.7299 D(G(z)): 0.2361/0.1241
[4/5] [200/1583] Loss_D: 0.4699 Loss_G 1.9515 D(x): 0.7187 D(G(z)): 0.0963/0.1836
[4/5] [250/1583] Loss_D: 0.6581 Loss_G 1.8691 D(x): 0.6796 D(G(z)): 0.1988/0.1950
[4/5] [300/1583] Loss_D: 0.7072 Loss_G 2.3310 D(x): 0.7996 D(G(z)): 0.3419/0.1218
[4/5] [350/1583] Loss_D: 1.4915 Loss_G 4.6909 D(x): 0.9691 D(G(z)): 0.7055/0.0143
[4/5] [400/1583] Loss_D: 0.7722 Loss_G 3.2458 D(x): 0.8720 D(G(z)): 0.4223/0.0511
[4/5] [450/1583] Loss_D: 1.6807 Loss_G 0.2487 D(x): 0.3045 D(G(z)): 0.2069/0.7933
[4/5] [500/1583] Loss_D: 0.8011 Loss_G 2.0801 D(x): 0.7842 D(G(z)): 0.3814/0.1584
[4/5] [550/1583] Loss_D: 0.7781 Loss_G 1.3220 D(x): 0.5185 D(G(z)): 0.0437/0.3086
[4/5] [600/1583] Loss_D: 0.9146 Loss_G 1.1716 D(x): 0.5058 D(G(z)): 0.0925/0.3569
[4/5] [650/1583] Loss_D: 0.6587 Loss_G 2.8468 D(x): 0.8144 D(G(z)): 0.3266/0.0783
[4/5] [700/1583] Loss_D: 1.1936 Loss_G 0.4950 D(x): 0.3779 D(G(z)): 0.0447/0.6399
[4/5] [750/1583] Loss_D: 0.6820 Loss_G 2.3641 D(x): 0.7134 D(G(z)): 0.2400/0.1201
[4/5] [800/1583] Loss_D: 0.7211 Loss_G 3.0129 D(x): 0.9204 D(G(z)): 0.4249/0.0648
[4/5] [850/1583] Loss_D: 0.9899 Loss_G 3.3069 D(x): 0.8724 D(G(z)): 0.5214/0.0492
[4/5] [900/1583] Loss_D: 0.5789 Loss_G 2.5141 D(x): 0.7435 D(G(z)): 0.2110/0.1052
[4/5] [950/1583] Loss_D: 0.7162 Loss_G 1.3583 D(x): 0.5589 D(G(z)): 0.0576/0.3125
[4/5] [1000/1583] Loss_D: 1.1378 Loss_G 3.7072 D(x): 0.8517 D(G(z)): 0.5624/0.0364
[4/5] [1050/1583] Loss_D: 0.5823 Loss_G 2.5660 D(x): 0.7596 D(G(z)): 0.2257/0.0966
[4/5] [1100/1583] Loss_D: 0.7205 Loss_G 1.8147 D(x): 0.6805 D(G(z)): 0.2338/0.1991
[4/5] [1150/1583] Loss_D: 0.6265 Loss_G 2.7900 D(x): 0.7949 D(G(z)): 0.2872/0.0816
[4/5] [1200/1583] Loss_D: 1.1111 Loss_G 4.4571 D(x): 0.9287 D(G(z)): 0.5991/0.0167
[4/5] [1250/1583] Loss_D: 1.0609 Loss_G 4.3863 D(x): 0.8724 D(G(z)): 0.5500/0.0174
[4/5] [1300/1583] Loss_D: 0.6351 Loss_G 1.9326 D(x): 0.7810 D(G(z)): 0.2821/0.1783
[4/5] [1350/1583] Loss_D: 0.5135 Loss_G 2.3507 D(x): 0.7324 D(G(z)): 0.1416/0.1288
[4/5] [1400/1583] Loss_D: 0.6132 Loss_G 5.0354 D(x): 0.9302 D(G(z)): 0.3841/0.0102
[4/5] [1450/1583] Loss_D: 0.5440 Loss_G 2.3178 D(x): 0.7050 D(G(z)): 0.1354/0.1257
[4/5] [1500/1583] Loss_D: 0.5710 Loss_G 2.4214 D(x): 0.8401 D(G(z)): 0.2911/0.1163
[4/5] [1550/1583] Loss_D: 2.0148 Loss_G 4.4395 D(x): 0.9461 D(G(z)): 0.7895/0.0236
with torch.no_grad():
    fake = netG(fixed_noise).detach().cpu()
# fake
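To keep a copy of the generated grid on disk, torchvision's helper can be used (a sketch; the filename is arbitrary):

vutils.save_image(fake, "dcgan_fake_samples.png", padding=2, normalize=True)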
real_batch = next(iter(dataloader))

# Plot the real images
plt.figure(figsize=(30,30))
plt.subplot(1,2,1)
plt.axis("off")  # call the function rather than assigning to plt.axis
plt.title("Real Images")
plt.imshow(np.transpose(vutils.make_grid(real_batch[0].to(device)[:64], padding=5, normalize=True).cpu(), (1,2,0)))

# Plot the fake images from the last epoch
plt.subplot(1,2,2)
plt.axis("off")
plt.title("Fake Images")
plt.imshow(np.transpose(vutils.make_grid(fake, padding=2, normalize=True), (1,2,0)))
plt.show()

png
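The training loop also collected `G_losses` and `D_losses` at every iteration; plotting them gives a quick view of how the adversarial game evolved (a sketch, not in the original notebook):

plt.figure(figsize=(10,5))
plt.title("Generator and Discriminator Loss During Training")
plt.plot(G_losses, label="G")
plt.plot(D_losses, label="D")
plt.xlabel("iterations")
plt.ylabel("loss")
plt.legend()
plt.show()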